| commit | old_file | new_file | old_contents | new_contents | subject | message | lang | license | repos |
|---|---|---|---|---|---|---|---|---|---|
1009bd3b5e5941aff2f7b3852494ee19f085dcce
|
mediacloud/mediawords/db/exceptions/handler.py
|
mediacloud/mediawords/db/exceptions/handler.py
|
class McDatabaseHandlerException(Exception):
"""Database handler exception."""
pass
class McConnectException(McDatabaseHandlerException):
"""__connect() exception."""
pass
class McSchemaIsUpToDateException(McDatabaseHandlerException):
"""schema_is_up_to_date() exception."""
pass
class McQueryException(McDatabaseHandlerException):
"""query() exception."""
pass
class McPrimaryKeyColumnException(McDatabaseHandlerException):
"""primary_key_column() exception."""
pass
class McFindByIDException(McDatabaseHandlerException):
"""find_by_id() exception."""
pass
class McRequireByIDException(McDatabaseHandlerException):
"""require_by_id() exception."""
pass
class McUpdateByIDException(McDatabaseHandlerException):
"""update_by_id() exception."""
pass
class McDeleteByIDException(McDatabaseHandlerException):
"""delete_by_id() exception."""
pass
class McCreateException(McDatabaseHandlerException):
"""create() exception."""
pass
class McFindOrCreateException(McDatabaseHandlerException):
"""find_or_create() exception."""
pass
class McQuoteException(McDatabaseHandlerException):
"""quote() exception."""
pass
class McPrepareException(McDatabaseHandlerException):
"""prepare() exception."""
pass
class McQueryPagedHashesException(McDatabaseHandlerException):
"""query_paged_hashes() exception."""
pass
|
class McDatabaseHandlerException(Exception):
"""Database handler exception."""
pass
class McConnectException(McDatabaseHandlerException):
"""__connect() exception."""
pass
class McSchemaIsUpToDateException(McDatabaseHandlerException):
"""schema_is_up_to_date() exception."""
pass
class McQueryException(McDatabaseHandlerException):
"""query() exception."""
pass
class McPrimaryKeyColumnException(McDatabaseHandlerException):
"""primary_key_column() exception."""
pass
class McFindByIDException(McDatabaseHandlerException):
"""find_by_id() exception."""
pass
class McRequireByIDException(McDatabaseHandlerException):
"""require_by_id() exception."""
pass
class McUpdateByIDException(McDatabaseHandlerException):
"""update_by_id() exception."""
pass
class McDeleteByIDException(McDatabaseHandlerException):
"""delete_by_id() exception."""
pass
class McCreateException(McDatabaseHandlerException):
"""create() exception."""
pass
class McSelectException(McDatabaseHandlerException):
"""select() exception."""
pass
class McFindOrCreateException(McDatabaseHandlerException):
"""find_or_create() exception."""
pass
class McQuoteException(McDatabaseHandlerException):
"""quote() exception."""
pass
class McPrepareException(McDatabaseHandlerException):
"""prepare() exception."""
pass
class McQueryPagedHashesException(McDatabaseHandlerException):
"""query_paged_hashes() exception."""
pass
|
Add exception to be thrown by select()
|
Add exception to be thrown by select()
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
cb6fa6b54ca3e1908037a1b1a3399d8bd4b1be58
|
djoser/compat.py
|
djoser/compat.py
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError:
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
from djoser.conf import settings
try:
from django.contrib.auth.password_validation import validate_password
except ImportError: # pragma: no cover
from password_validation import validate_password
__all__ = ['settings', 'validate_password']
def get_user_email(user):
email_field_name = get_user_email_field_name(user)
return getattr(user, email_field_name, None)
def get_user_email_field_name(user):
try: # Assume we are Django >= 1.11
return user.get_email_field_name()
except AttributeError: # we are using Django < 1.11
return settings.USER_EMAIL_FIELD_NAME
|
Fix invalid fallback leading to circular calls
|
Fix invalid fallback leading to circular calls
Remove redundant finally
|
Python
|
mit
|
sunscrapers/djoser,akalipetis/djoser,sunscrapers/djoser,sunscrapers/djoser,akalipetis/djoser
|
8aaeae23db31162677637a41c5343558f3842450
|
tests/test_i19screen.py
|
tests/test_i19screen.py
|
from __future__ import absolute_import, division, print_function
import pytest
from i19.command_line.screen import I19Screen
def test_i19screen_command_line_help_does_not_crash():
I19Screen().run('')
def test_i19screen(regression_data, run_in_tmpdir):
data_dir = regression_data('X4_wide').strpath
I19Screen().run([data_dir])
logfile = run_in_tmpdir.join('i19.screen.log').read()
assert 'i19.screen successfully completed' in logfile
assert 'photon incidence rate is outside the linear response region' in logfile
|
from __future__ import absolute_import, division, print_function
import mock
import pytest
from i19.command_line.screen import I19Screen
def test_i19screen_command_line_help_does_not_crash():
I19Screen().run('')
def test_i19screen(regression_data, run_in_tmpdir):
data_dir = regression_data('X4_wide').strpath
I19Screen().run([data_dir])
logfile = run_in_tmpdir.join('i19.screen.log').read()
assert 'i19.screen successfully completed' in logfile
assert 'photon incidence rate is outside the linear response region' in logfile
@mock.patch('i19.command_line.screen.procrunner')
def test_i19screen_calls(procrunner, run_in_tmpdir):
procrunner.run.return_value = {'exitcode': 0, 'runtime': 0}
files = 'dataset.cbf:1:100'
with pytest.raises(SystemExit):
I19Screen().run([files])
procrunner.run.assert_called_once_with([
'dials.import',
'input.template=dataset.cbf',
'geometry.scan.image_range=1,100',
'geometry.scan.extrapolate_scan=True'
],
debug=False,
print_stdout=False,
)
|
Add test for template parsing
|
Add test for template parsing
|
Python
|
bsd-3-clause
|
xia2/i19
|
ba6305578ad41519ea5f05296dda9732d538d5b3
|
tests/test_pathutils.py
|
tests/test_pathutils.py
|
from os.path import join
import sublime
import sys
from unittest import TestCase
version = sublime.version()
if version < '3000':
from libsass import pathutils
else:
from sublime_libsass.libsass import pathutils
class test_pathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
if version < '3000':
from libsass import pathutils
else:
from sublime_libsass.libsass import pathutils
class test_pathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch('pathutils.os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
Add unit test using mock
|
Add unit test using mock
|
Python
|
mit
|
blitzrk/sublime_libsass,blitzrk/sublime_libsass
|
0e5f5716b8a48ef5fc03f8ee5a3eef140304a354
|
jmbo_calendar/views.py
|
jmbo_calendar/views.py
|
from django.utils.translation import ugettext as _
from jmbo.views import ObjectList
from jmbo_calendar.models import Event
class ObjectList(ObjectList):
def get_context_data(self, **kwargs):
context = super(ObjectList, self).get_context_data(**kwargs)
try:
from django.contrib.gis.geos import Point
show_distance = isinstance(
self.request.session['location']['position'], Point
)
except ImportError:
show_distance = False
context["title"] = _("Events")
context["show_distance"] = show_distance
return context
def get_queryset(self):
qs = Event.coordinator.upcoming()
qs = qs.filter(
location__country=self.request.session['location']['city'].country_id
)
position = self.request.session['location']['position']
if not isinstance(position, Point):
position = self.request.session['location']['city'].coordinates
qs = qs.distance(position).order_by('distance', 'start')
return qs
def get_paginate_by(self, *args, **kwargs):
# todo: needs work in Jmbo to work
return 10
|
from django.utils.translation import ugettext as _
from jmbo import USE_GIS
from jmbo.views import ObjectList
from jmbo_calendar.models import Event
class ObjectList(ObjectList):
def get_context_data(self, **kwargs):
context = super(ObjectList, self).get_context_data(**kwargs)
show_distance = False
if USE_GIS:
from django.contrib.gis.geos import Point
show_distance = isinstance(
self.request.session['location']['position'], Point
)
context["title"] = _("Events")
context["show_distance"] = show_distance
return context
def get_queryset(self):
qs = Event.coordinator.upcoming()
qs = qs.filter(
location__country=self.request.session['location']['city'].country_id
)
position = self.request.session['location']['position']
if not isinstance(position, Point):
position = self.request.session['location']['city'].coordinates
qs = qs.distance(position).order_by('distance', 'start')
return qs
def get_paginate_by(self, *args, **kwargs):
# todo: needs work in Jmbo to work
return 10
|
Revert to using jmbo gis flag
|
Revert to using jmbo gis flag
|
Python
|
bsd-3-clause
|
praekelt/jmbo-calendar,praekelt/jmbo-calendar
|
407c6ceec878f60aa908ac12dd9cccc4c4dec9b4
|
masters/master.chromium.chrome/master_gatekeeper_cfg.py
|
masters/master.chromium.chrome/master_gatekeeper_cfg.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
categories_steps = {
'': [
'update',
'runhooks',
'svnkill',
'taskkill',
'check_deps2git',
'check_deps',
'compile',
'archive_build'
],
}
exclusions = {
}
forgiving_steps = ['update_scripts', 'update', 'svnkill', 'taskkill',
'archive_build', 'start_crash_handler', 'gclient_revert']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
tree_status_url=active_master.tree_status_url,
use_getname=True))
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
categories_steps = {
'': [
'steps',
],
}
exclusions = {
}
forgiving_steps = ['update_scripts', 'update', 'svnkill', 'taskkill',
'archive_build', 'start_crash_handler', 'gclient_revert']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
tree_status_url=active_master.tree_status_url,
use_getname=True))
|
Modify gatekeeper config for master.chromium.chrome after switch to recipes
|
Modify gatekeeper config for master.chromium.chrome after switch to recipes
BUG=338501
Review URL: https://codereview.chromium.org/131033006
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@249524 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
dc2f8342bc9b9c921086948ed10f99de9bcbc76d
|
client/python/setup.py
|
client/python/setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='Spiff',
version='0.1',
description="API to Spaceman Spiff",
author='Trever Fischer',
author_email='wm161@wm161.net',
url='http://github.com/synhak/spiff',
py_modules=['spiff']
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(name='Spiff',
version='0.1',
description="API to Spaceman Spiff",
author='Trever Fischer',
author_email='wm161@wm161.net',
url='http://github.com/synhak/spiff',
py_modules=['spiff'],
requires=['requests'],
)
|
Add deps for python lib
|
Add deps for python lib
|
Python
|
agpl-3.0
|
SYNHAK/spiff,SYNHAK/spiff,SYNHAK/spiff
|
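Note on the change above: distutils' `requires` field is metadata only; it does not make pip install anything. A minimal, hypothetical guard (not part of the Spiff repo) that a client script could use to fail fast when the declared dependency is missing:

# Hypothetical runtime guard; requires=['requests'] in setup.py is metadata only.
try:
    import requests
except ImportError:
    raise SystemExit("Spiff needs the 'requests' package: pip install requests")

print("requests %s is available" % requests.__version__)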
9546faddab321eb508f358883faf45cbc7d48dd8
|
calexicon/internal/tests/test_julian.py
|
calexicon/internal/tests/test_julian.py
|
import unittest
from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
def test_julian_to_gregorian(self):
self.assertEqual(julian_to_gregorian(1984, 2, 29), (1984, 3, 13))
|
import unittest
from datetime import date as vanilla_date
from calexicon.internal.julian import distant_julian_to_gregorian, julian_to_gregorian
class TestJulian(unittest.TestCase):
def test_distant_julian_to_gregorian(self):
self.assertEqual(distant_julian_to_gregorian(9999, 12, 1), (10000, 2, 12))
def test_julian_to_gregorian(self):
self.assertEqual(julian_to_gregorian(1984, 2, 29), vanilla_date(1984, 3, 13))
|
Correct test - vanilla_date not tuple.
|
Correct test - vanilla_date not tuple.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
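The fix above matters because a tuple never compares equal to a `datetime.date`, so once `julian_to_gregorian` presumably started returning a date object, the old tuple assertion could not pass. A self-contained sketch of the distinction:

from datetime import date

# A tuple and a date are different types, so == between them is always False.
assert (1984, 3, 13) != date(1984, 3, 13)
assert date(1984, 3, 13) == date(1984, 3, 13)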
ab15e817d4248f982502440007c7562779cc644b
|
pythonwarrior/ui.py
|
pythonwarrior/ui.py
|
from pythonwarrior.config import Config
import time
class UI(object):
@staticmethod
def puts(msg):
if Config.out_stream:
return Config.out_stream.write(msg + "\n")
@staticmethod
def puts_with_delay(msg):
result = UI.puts(msg)
if Config.delay:
time.sleep(Config.delay)
return result
@staticmethod
def write(msg):
if Config.out_stream:
return Config.out_stream.write(msg)
@staticmethod
def gets():
if Config.in_stream:
return Config.in_stream.read()
else:
return ''
@staticmethod
def request(msg):
UI.write(msg)
return UI.gets().rstrip()
@staticmethod
def ask(msg):
return UI.request("%s [yn] " % msg) == 'y'
@staticmethod
def choose(item, options):
if len(options) == 1:
response = options[0]
else:
for idx, option in enumerate(options):
if type(option) == list:
UI.puts("[%d] %s" % (idx+1, option[-1]))
else:
UI.puts("[%d] %s" % (idx+1, option))
choice = UI.request("Choose %s by typing the number: " % item)
response = options[int(choice)-1]
if type(response) == list:
return response[0]
else:
return response
|
from pythonwarrior.config import Config
import time
class UI(object):
@staticmethod
def puts(msg):
if Config.out_stream:
return Config.out_stream.write(msg + "\n")
@staticmethod
def puts_with_delay(msg):
result = UI.puts(msg)
if Config.delay:
time.sleep(Config.delay)
return result
@staticmethod
def write(msg):
if Config.out_stream:
return Config.out_stream.write(msg)
@staticmethod
def gets():
if Config.in_stream:
return Config.in_stream.readline()
else:
return ''
@staticmethod
def request(msg):
UI.write(msg)
return UI.gets().rstrip()
@staticmethod
def ask(msg):
return UI.request("%s [yn] " % msg) == 'y'
@staticmethod
def choose(item, options):
if len(options) == 1:
response = options[0]
else:
for idx, option in enumerate(options):
if type(option) == list:
UI.puts("[%d] %s" % (idx+1, option[-1]))
else:
UI.puts("[%d] %s" % (idx+1, option))
choice = UI.request("Choose %s by typing the number: " % item)
response = options[int(choice)-1]
if type(response) == list:
return response[0]
else:
return response
|
Change UI gets to use readline instead of read
|
Change UI gets to use readline instead of read
|
Python
|
mit
|
arbylee/python-warrior
|
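The read-to-readline switch above changes how much input each prompt consumes: `read()` drains the whole stream, so a second `request()` call would see an empty string, while `readline()` takes one answer at a time. A minimal sketch with an in-memory stream standing in for Config.in_stream (hypothetical, not from the repo):

import io

stream = io.StringIO("y\nn\n")
# readline() stops at the newline, so each prompt gets exactly one answer;
# read() would have swallowed both lines on the first call.
assert stream.readline().rstrip() == "y"
assert stream.readline().rstrip() == "n"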
adb458132b4e633052c9e46e1dc4e67306f9fc6d
|
tikplay/database/models.py
|
tikplay/database/models.py
|
import sqlalchemy as sa
from database import Base
class Song(Base):
__tablename__ = 'songs'
song_hash = sa.Column(sa.String(40), primary_key=True)
filename = sa.Column(sa.Text, nullable=False)
play_count = sa.Column(sa.Integer, nullable=False)
artist = sa.Column(sa.Text, nullable=True)
title = sa.Column(sa.Text, nullable=True)
length = sa.Column(sa.Integer, nullable=True)
last_played = sa.Column(sa.DateTime, nullable=True)
date_added = sa.Column(sa.DateTime, nullable=True)
|
import sqlalchemy as sa
from database import Base
class Song(Base):
__tablename__ = 'songs'
song_hash = sa.Column(sa.String(40), primary_key=True)
filename = sa.Column(sa.Text, nullable=False)
play_count = sa.Column(sa.Integer, nullable=False)
artist = sa.Column(sa.Text, nullable=True)
title = sa.Column(sa.Text, nullable=True)
length = sa.Column(sa.Integer, nullable=True)
last_played = sa.Column(sa.DateTime, nullable=True)
date_added = sa.Column(sa.DateTime, nullable=True)
def __repr__(self):
return "<Song(song_hash={!r}, filename={!r}, play_count={!r}, artist={!r}, title={!r}, length={!r}, last_played={!r}, date_added={!r})>".format(
self.song_hash, self.filename, self.play_count, self.artist,
self.title, self.length, self.last_played, self.date_added)
def __str__(self):
return "<Song(song_hash={!s}, filename={!s}, play_count={!s}, artist={!s}, title={!s}, length={!s}, last_played={!s}, date_added={!s})>".format(
self.song_hash, self.filename, self.play_count, self.artist,
self.title, self.length, self.last_played, self.date_added)
|
Add __repr__ and __str__ for DB model
|
Add __repr__ and __str__ for DB model
|
Python
|
mit
|
tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay,tietokilta-saato/tikplay
|
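The only difference between the two methods added above is the conversion flag: `{!r}` formats each field through `repr()` (strings come out quoted), while `{!s}` goes through `str()`. A quick standalone illustration:

# {!r} quotes strings via repr(); {!s} renders them plain via str().
value = "song.mp3"
assert "{!r}".format(value) == "'song.mp3'"
assert "{!s}".format(value) == "song.mp3"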
d7087cb309c028bdd56cf4c605d7c60eac3d4c0c
|
utils/custom_context.py
|
utils/custom_context.py
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description,
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description,
)
await self.send(embed=em)
|
import discord
from discord.ext import commands
class CustomContext(commands.Context):
async def error(self, title: str, description: str = None):
em = discord.Embed(
title=f":no_entry_sign: {title}",
color=discord.Color.dark_red(),
description=description or '',
)
await self.send(embed=em)
async def success(self, title: str, description: str = None):
em = discord.Embed(
title=f":white_check_mark: {title}",
color=discord.Color.dark_green(),
description=description or '',
)
await self.send(embed=em)
|
Remove 'None' in embed description
|
Remove 'None' in embed description
|
Python
|
mit
|
Naught0/qtbot
|
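The `or ''` guard above relies on `None` being falsy: under the discord.py version the project used, passing description=None apparently rendered the literal text "None" in the embed. A tiny sketch of the idiom:

def coalesce(description=None):
    # None (and '') are falsy, so the embed body falls back to empty text.
    return description or ''

assert coalesce() == ''
assert coalesce("details") == "details"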
89593cc22f8de4bdb6d605b2e4d6e04b0d1fcd61
|
microcosm_postgres/types.py
|
microcosm_postgres/types.py
|
"""
Custom types.
"""
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
"""
Custom types.
"""
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
Handle non-enum inputs (if they are enum names)
|
Handle non-enum inputs (if they are enum names)
|
Python
|
apache-2.0
|
globality-corp/microcosm-postgres,globality-corp/microcosm-postgres
|
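A standalone sketch of the widened bind logic above, using a throwaway enum (names here are illustrative, not from the project):

from enum import Enum

class Color(Enum):
    RED = 1
    GREEN = 2

def bind(enum_class, value):
    # Mirrors process_bind_param: enum members are validated via the
    # value lookup; plain strings are treated as enum *names*.
    if value is None:
        return None
    if isinstance(value, Enum):
        return enum_class(value).name
    return enum_class[value].name

assert bind(Color, Color.RED) == "RED"
assert bind(Color, "GREEN") == "GREEN"
assert bind(Color, None) is None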
"""
Custom types.
"""
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
Handle non-enum inputs (if they are enum names)
|
"""
Custom types.
"""
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
<commit_before>"""
Custom types.
"""
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
<commit_msg>Handle non-enum inputs (if they are enum names)<commit_after>
|
"""
Custom types.
"""
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
"""
Custom types.
"""
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
Handle non-enum inputs (if they are enum names)"""
Custom types.
"""
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
<commit_before>"""
Custom types.
"""
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
return text_type(self.enum_class(value).name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
<commit_msg>Handle non-enum inputs (if they are enum names)<commit_after>"""
Custom types.
"""
from enum import Enum
from six import text_type
from sqlalchemy.types import TypeDecorator, Unicode
class EnumType(TypeDecorator):
"""
SQLAlchemy enum type that persists the enum name (not value).
Note that this type is very similar to the `ChoiceType` from `sqlalchemy_utils`,
with the key difference being persisting by name (and not value).
"""
impl = Unicode(255)
def __init__(self, enum_class):
self.enum_class = enum_class
@property
def python_type(self):
return self.impl.python_type
def process_bind_param(self, value, dialect):
if value is None:
return None
if isinstance(value, Enum):
return text_type(self.enum_class(value).name)
return text_type(self.enum_class[value].name)
def process_result_value(self, value, dialect):
if value is None:
return None
return self.enum_class[value]
|
c3d4000598a0d8dcbae91d11c36e8361887fa96a
|
storm/zope/schema.py
|
storm/zope/schema.py
|
#
# Copyright (c) 2006, 2007 Canonical
#
# Written by Gustavo Niemeyer <gustavo@niemeyer.net>
#
# This file is part of Storm Object Relational Mapper.
#
# Storm is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# Storm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""ZStorm-aware schema manager."""
import transaction
from storm.schema import Schema
class ZStormCommitter(object):
"""A L{Schema} committer that uses Zope's transaction manager."""
def commit(self):
transaction.commit()
def rollback(self):
transaction.abort()
class ZSchema(Schema):
"""Convenience for creating L{Schema}s that use a L{ZStormCommitter}."""
def __init__(self, creates, drops, deletes, patch_package):
committer = ZStormCommitter()
super(ZSchema, self).__init__(creates, drops, deletes, patch_package,
committer)
|
#
# Copyright (c) 2006, 2007 Canonical
#
# Written by Gustavo Niemeyer <gustavo@niemeyer.net>
#
# This file is part of Storm Object Relational Mapper.
#
# Storm is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation; either version 2.1 of
# the License, or (at your option) any later version.
#
# Storm is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""ZStorm-aware schema manager."""
import transaction
from storm.schema import Schema
class ZCommitter(object):
"""A L{Schema} committer that uses Zope's transaction manager."""
def commit(self):
transaction.commit()
def rollback(self):
transaction.abort()
class ZSchema(Schema):
"""Convenience for creating L{Schema}s that use a L{ZCommitter}."""
def __init__(self, creates, drops, deletes, patch_package):
committer = ZCommitter()
super(ZSchema, self).__init__(creates, drops, deletes, patch_package,
committer)
|
Fix docstring and class name
|
Fix docstring and class name
|
Python
|
lgpl-2.1
|
petrhosek/storm,PyMamba/mamba-storm,petrhosek/storm,petrhosek/storm,PyMamba/mamba-storm,PyMamba/mamba-storm
|
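The rename above is cosmetic; what Schema actually depends on is just the commit()/rollback() pair. That duck-typed contract makes the committer trivially stubbable in tests, e.g. (hypothetical stub, not from the repo):

class RecordingCommitter(object):
    """Test double satisfying the same commit()/rollback() protocol."""
    def __init__(self):
        self.calls = []
    def commit(self):
        self.calls.append("commit")
    def rollback(self):
        self.calls.append("rollback")

c = RecordingCommitter()
c.commit()
c.rollback()
assert c.calls == ["commit", "rollback"]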
d043b6b019898a8da69662ba3852cca8e887c384
|
tests/TestAssignmentRegex.py
|
tests/TestAssignmentRegex.py
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("../resources/BasicStringAssignment.txt")
cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("./resources/BasicStringAssignment.txt")
cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
Test changing file path for nosetests
|
Test changing file path for nosetests
|
Python
|
bsd-3-clause
|
sky-uk/bslint
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("../resources/BasicStringAssignment.txt")
cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)Test changing file path for nosetests
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("./resources/BasicStringAssignment.txt")
cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
<commit_before>import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("../resources/BasicStringAssignment.txt")
cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)<commit_msg>Test changing file path for nosetests<commit_after>
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("./resources/BasicStringAssignment.txt")
cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("../resources/BasicStringAssignment.txt")
cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)Test changing file path for nosetestsimport unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("./resources/BasicStringAssignment.txt")
cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
<commit_before>import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("../resources/BasicStringAssignment.txt")
cls.int_file = src.main("../resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)<commit_msg>Test changing file path for nosetests<commit_after>import unittest
import src
import resources.Constants as const
class TestAssignments(unittest.TestCase):
string_file = ''
int_file = ''
@classmethod
def setUpClass(cls):
cls.string_file = src.main("./resources/BasicStringAssignment.txt")
cls.int_file = src.main("./resources/BasicIntegerAssignment.txt")
def testString(self):
test_string = '"test123ID"'
exp_result = [("test123ID", const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
def testVariableAssignmentString(self):
exp_result = [('string', const.ID), ('=', const.STMT), ("words", const.STRING)]
result = src.lexer(self.string_file)
self.assertEqual(result, exp_result)
def testDoubleQuoteString(self):
test_string = '""""'
exp_result = [('""', const.STRING)]
result = src.lexer(test_string)
self.assertEqual(result, exp_result)
|
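The fix in this record simply swaps ../resources for ./resources, which works only when nosetests is invoked from the project root. A more location-independent alternative (a sketch, not what the commit does) resolves fixtures relative to the test module itself; the directory layout assumed here mirrors the record's original "../resources" paths.
import os
# Sketch: build fixture paths from this file's location so the tests pass
# no matter which directory nosetests is started from.
TEST_DIR = os.path.dirname(os.path.abspath(__file__))
FIXTURE = os.path.join(TEST_DIR, os.pardir, "resources", "BasicStringAssignment.txt")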
76f1dee56778fd3ee793604c66d010d900f4440c
|
Python/tigre/utilities/im3Dnorm.py
|
Python/tigre/utilities/im3Dnorm.py
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind is 'TV':
raise Exception('TV not yet implemented')
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind == 'TV':
raise Exception('TV not yet implemented')
|
Fix warnings during installation with python 3.8
|
Fix warnings during installation with python 3.8
The warning was:
> .../lib/python3.8/site-packages/tigre/utilities/im3Dnorm.py:36:
> SyntaxWarning: "is" with a literal. Did you mean "=="?
|
Python
|
bsd-3-clause
|
CERN/TIGRE,CERN/TIGRE,CERN/TIGRE,CERN/TIGRE
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind is 'TV':
raise Exception('TV not yet implemented')
Fix warnings during installation with python 3.8
The warning was:
> .../lib/python3.8/site-packages/tigre/utilities/im3Dnorm.py:36:
> SyntaxWarning: "is" with a literal. Did you mean "=="?
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind == 'TV':
raise Exception('TV not yet implemented')
|
<commit_before>import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind is 'TV':
raise Exception('TV not yet implemented')
<commit_msg>Fix warnings during installation with python 3.8
The warning was:
> .../lib/python3.8/site-packages/tigre/utilities/im3Dnorm.py:36:
> SyntaxWarning: "is" with a literal. Did you mean "=="?<commit_after>
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind == 'TV':
raise Exception('TV not yet implemented')
|
import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm if image IMG using the norm
% defined in NORMING
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind is 'TV':
raise Exception('TV not yet implemented')
Fix warnings during installation with python 3.8
The warning was:
> .../lib/python3.8/site-packages/tigre/utilities/im3Dnorm.py:36:
> SyntaxWarning: "is" with a literal. Did you mean "=="?import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm if image IMG using the norm
% defined in NORMING
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind == 'TV':
raise Exception('TV not yet implemented')
|
<commit_before>import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind is 'TV':
raise Exception('TV not yet implemented')
<commit_msg>Fix warnings during installation with python 3.8
The warning was:
> .../lib/python3.8/site-packages/tigre/utilities/im3Dnorm.py:36:
> SyntaxWarning: "is" with a literal. Did you mean "=="?<commit_after>import numpy as np
def im3DNORM(img,normind,varargin=None):
'''
% IMAGE3DNORM computes the desired image norm
% IMAGE3DNORM(IMG,NORMIND) computes the norm of image IMG using the norm
% defined in NORMIND
%
% IMG A 3D image
% NORMIND {non-zero int, inf, -inf, 'fro', 'nuc'}
% 'TV': TV norm
%
%
%--------------------------------------------------------------------------
%--------------------------------------------------------------------------
% This file is part of the TIGRE Toolbox
%
% Copyright (c) 2015, University of Bath and
% CERN-European Organization for Nuclear Research
% All rights reserved.
%
% License: Open Source under BSD.
% See the full license at
% https://github.com/CERN/TIGRE/license.txt
%
% Contact: tigre.toolbox@gmail.com
% Codes: https://github.com/CERN/TIGRE/
% Coded by: Ander Biguri
%--------------------------------------------------------------------------
'''
if normind is [np.inf,-np.inf,'fro','nuc']:
return np.linalg.norm(img.ravel(),normind)
if type(normind) is int:
return np.linalg.norm(img.ravel(),normind)
if normind == 'TV':
raise Exception('TV not yet implemented')
|
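Worth noting about this record: the commit only silences the warning on the 'TV' branch, but the first branch, normind is [np.inf, -np.inf, 'fro', 'nuc'], tests identity against a freshly built list literal and is therefore always False (membership was presumably intended). A corrected sketch of the dispatch under that assumption, restricted to vector-valid orders since 'fro'/'nuc' would be rejected for the raveled 1-D array anyway:
import numpy as np
def im3d_norm(img, normind):
    # Sketch only: membership/type checks replace the original "is" tests,
    # which can never be true against a brand-new list literal.
    if normind in (np.inf, -np.inf) or isinstance(normind, int):
        return np.linalg.norm(img.ravel(), normind)
    if normind == 'TV':
        raise NotImplementedError('TV norm not yet implemented')
    raise ValueError('unsupported norm indicator: %r' % (normind,))
For example, im3d_norm(img, 2) returns the Euclidean norm of the flattened volume.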
02de17ee721d40ddcce9598d8a2ed82930240eda
|
server/python_django/file_uploader/__init__.py
|
server/python_django/file_uploader/__init__.py
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return "{success: true}"
else:
return '{"error": "File is too large."}'
else:
return '{"error": "File has an invalid extension."}'
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
Improve the responses. In the success case the JSON was invalid; the key required quotes
|
Improve the responses. In the success case the JSON was invalid; the key required quotes
|
Python
|
mit
|
SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,FineUploader/fine-uploader,SimonWaldherr/uploader,FineUploader/fine-uploader,SimonWaldherr/uploader
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return "{success: true}"
else:
return '{"error": "File is too large."}'
else:
return '{"error": "File has an invalid extension."}'
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
Improve the responses. In the success case the JSON was invalid; the key required quotes
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
<commit_before>"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return "{success: true}"
else:
return '{"error": "File is too large."}'
else:
return '{"error": "File has an invalid extension."}'
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
<commit_msg>Improve the responses. In the success case the JSON was invalid; the key required quotes<commit_after>
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return "{success: true}"
else:
return '{"error": "File is too large."}'
else:
return '{"error": "File has an invalid extension."}'
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
Improve the responses. In the success case the JSON was invalid; the key required quotes"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
<commit_before>"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return "{success: true}"
else:
return '{"error": "File is too large."}'
else:
return '{"error": "File has an invalid extension."}'
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
<commit_msg>Improve the responses. In the success case the JSON was invalid; the key required quotes<commit_after>"""
@author: Ferdinand E. Silva
@email: ferdinandsilva@ferdinandsilva.com
@website: http://ferdinandsilva.com
"""
import os
from django.utils import simplejson as json
class qqFileUploader(object):
def __init__(self, allowedExtensions=None, sizeLimit=1024):
self.allowedExtensions = allowedExtensions or []
self.sizeLimit = sizeLimit
def handleUpload(self, request, uploadDirectory):
#read file info from stream
uploaded = request.read
#get file size
fileSize = int(uploaded.im_self.META["CONTENT_LENGTH"])
#get file name
fileName = uploaded.im_self.META["HTTP_X_FILE_NAME"]
#check first for allowed file extensions
if self._getExtensionFromFileName(fileName) in self.allowedExtensions or ".*" in self.allowedExtensions:
#check file size
if fileSize <= self.sizeLimit:
#upload file
#write file
file = open(os.path.join(uploadDirectory, fileName), "wb+")
file.write(request.read(fileSize))
file.close()
return json.dumps({"success": True})
else:
return json.dumps({"error": "File is too large."})
else:
return json.dumps({"error": "File has an invalid extension."})
def _getExtensionFromFileName(self, fileName):
filename, extension = os.path.splitext(fileName)
return extension
|
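The motivation in this record is easy to verify: the old success payload "{success: true}" has an unquoted key, so strict JSON parsers reject it, whereas building the response with json.dumps yields valid JSON. A small demonstration, using the stdlib json module rather than the record's Django-era simplejson alias:
import json
# The old hand-built response is not valid JSON: the key is unquoted.
try:
    json.loads("{success: true}")
except ValueError as exc:
    print("old payload rejected:", exc)
# json.dumps quotes the key and serializes the boolean correctly.
payload = json.dumps({"success": True})
print(payload)              # {"success": true}
print(json.loads(payload))  # {'success': True}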
afec7b3844493f97d6ba287e1c7b10d272c6f205
|
tests/test_error_handling.py
|
tests/test_error_handling.py
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.json())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
"""
If we don't get an exception, no reason
to enforce that we get any specific exception.
"""
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
autodoc.json()
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
Change this test to not expect an error.
|
Change this test to not expect an error.
|
Python
|
mit
|
jwg4/flask-autodoc,jwg4/flask-autodoc
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.json())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
Change this test to not expect an error.
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
"""
If we don't get an exception, no reason
to enforce that we get any specific exception.
"""
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
autodoc.json()
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
<commit_before>import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.json())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
<commit_msg>Change this test to not expect an error.<commit_after>
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
"""
If we don't get an exception, no reason
to enforce that we get any specific exception.
"""
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
autodoc.json()
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.json())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
Change this test to not expect an error.import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
"""
If we don't get an exception, no reason
to enforce that we get any specific exception.
"""
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
autodoc.json()
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
<commit_before>import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.json())
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
<commit_msg>Change this test to not expect an error.<commit_after>import unittest
from flask import Flask
from flask_selfdoc import Autodoc
class TestErrorHandling(unittest.TestCase):
def test_app_not_initialized(self):
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
self.assertRaises(RuntimeError, lambda: autodoc.html())
def test_app_not_initialized_json(self):
"""
If we don't get an exception, no reason
to enforce that we get any specific exception.
"""
app = Flask(__name__)
autodoc = Autodoc()
with app.app_context():
autodoc.json()
def test_app_initialized_by_ctor(self):
app = Flask(__name__)
autodoc = Autodoc(app)
with app.app_context():
autodoc.html()
def test_app_initialized_by_init_app(self):
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)
with app.app_context():
autodoc.html()
|
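For context, the two setup styles these tests exercise follow the usual Flask extension pattern; a minimal sketch (the route and the @doc() decorator usage are illustrative, based on flask-selfdoc's documented interface):
from flask import Flask
from flask_selfdoc import Autodoc
app = Flask(__name__)
autodoc = Autodoc()
autodoc.init_app(app)  # deferred binding, as in test_app_initialized_by_init_app
# ...or Autodoc(app) to bind at construction time, as in test_app_initialized_by_ctor.
@app.route('/users')
@autodoc.doc()  # mark this endpoint for inclusion in the generated docs
def users():
    """List users."""
    return "[]"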
af413ab076bb74e93499455bc2fb761e4ec56702
|
scent.py
|
scent.py
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
failure = subprocess.call(command)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
os.environ['TEST_IDE'] = '1'
failure = subprocess.call(command, env=os.environ)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
Disable tests that launch programs with 'watch'
|
Disable tests that launch programs with 'watch'
|
Python
|
mit
|
jacebrowning/mine
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
failure = subprocess.call(command)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
Disable tests that launch programs with 'watch'
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
os.environ['TEST_IDE'] = '1'
failure = subprocess.call(command, env=os.environ)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
<commit_before>import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
failure = subprocess.call(command)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
<commit_msg>Disable tests that launch programs with 'watch'<commit_after>
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
os.environ['TEST_IDE'] = '1'
failure = subprocess.call(command, env=os.environ)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
failure = subprocess.call(command)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
Disable tests that launch programs with 'watch'import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
os.environ['TEST_IDE'] = '1'
failure = subprocess.call(command, env=os.environ)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
<commit_before>import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
failure = subprocess.call(command)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
<commit_msg>Disable tests that launch programs with 'watch'<commit_after>import os
import time
import subprocess
from sniffer.api import select_runnable, file_validator, runnable
try:
from pync import Notifier
except ImportError:
notify = None
else:
notify = Notifier.notify
watch_paths = ['mine/', 'tests/']
show_coverage = True
@select_runnable('python_tests')
@file_validator
def py_files(filename):
return all((filename.endswith('.py'),
not os.path.basename(filename).startswith('.')))
@runnable
def python_tests(*args):
group = int(time.time()) # unique per run
for count, (command, title) in enumerate((
(('make', 'test-unit'), "Unit Tests"),
(('make', 'test-int'), "Integration Tests"),
(('make', 'test-all'), "Combined Tests"),
(('make', 'check'), "Static Analysis"),
(('make', 'doc'), None),
), start=1):
print("")
print("$ %s" % ' '.join(command))
os.environ['TEST_IDE'] = '1'
failure = subprocess.call(command, env=os.environ)
if failure:
if notify and title:
mark = "❌" * count
notify(mark + " [FAIL] " + mark, title=title, group=group)
return False
else:
if notify and title:
mark = "✅" * count
notify(mark + " [PASS] " + mark, title=title, group=group)
global show_coverage
if show_coverage:
subprocess.call(['make', 'read-coverage'])
show_coverage = False
return True
|
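The substance of this change is that os.environ is mutated before the call and then passed explicitly via env=, so every make child process sees TEST_IDE=1. How the flag is consumed is outside the record; a sketch of one plausible consumer (pytest is an assumption here, since the Makefile targets are opaque):
import os
import pytest
# Sketch: skip tests that would launch real programs whenever the sniffer
# loop exports TEST_IDE=1 before invoking make.
launches_programs = pytest.mark.skipif(
    os.environ.get('TEST_IDE') == '1',
    reason="running under the file-watcher loop; skip tests that launch programs",
)
@launches_programs
def test_watch_launches_program():
    ...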
47a3c103344a5d6a558d8a5fa581c3440ca791e6
|
erpnext/patches/4_0/countrywise_coa.py
|
erpnext/patches/4_0/countrywise_coa.py
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
for d in (('Asset', 'Debit', 'No'), ('Liability', 'Credit', 'No'), ('Expense', 'Debit', 'Yes'),
('Income', 'Credit', 'Yes')):
frappe.db.sql("""update `tabAccount` set root_type = %s
where debit_or_credit=%s and is_pl_account=%s""", d)
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
frappe.db.sql("""update `tabAccount` set report_type =
if(is_pl_account=='Yes', 'Profit and Loss', 'Balance Sheet')""")
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
Patch to update old accounts property
|
Patch to update old accounts property
|
Python
|
agpl-3.0
|
anandpdoshi/erpnext,anandpdoshi/erpnext,pombredanne/erpnext,gmarke/erpnext,Drooids/erpnext,suyashphadtare/test,suyashphadtare/vestasi-erp-final,mbauskar/omnitech-erpnext,treejames/erpnext,tmimori/erpnext,susuchina/ERPNEXT,gangadharkadam/smrterp,suyashphadtare/vestasi-update-erp,fuhongliang/erpnext,gsnbng/erpnext,hatwar/buyback-erpnext,Tejal011089/paypal_erpnext,mahabuber/erpnext,mahabuber/erpnext,gangadhar-kadam/verve_test_erp,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,geekroot/erpnext,gangadharkadam/v5_erp,BhupeshGupta/erpnext,gangadhar-kadam/verve_live_erp,mbauskar/omnitech-erpnext,netfirms/erpnext,Tejal011089/huntercamp_erpnext,suyashphadtare/vestasi-erp-jan-end,hernad/erpnext,aruizramon/alec_erpnext,suyashphadtare/vestasi-erp-jan-end,BhupeshGupta/erpnext,indictranstech/tele-erpnext,suyashphadtare/vestasi-erp-jan-end,shft117/SteckerApp,suyashphadtare/vestasi-erp-1,gmarke/erpnext,gangadhar-kadam/laganerp,SPKian/Testing,suyashphadtare/sajil-erp,anandpdoshi/erpnext,sheafferusa/erpnext,indictranstech/internal-erpnext,indictranstech/phrerp,shft117/SteckerApp,indictranstech/erpnext,SPKian/Testing,indictranstech/fbd_erpnext,SPKian/Testing2,suyashphadtare/test,Tejal011089/huntercamp_erpnext,MartinEnder/erpnext-de,tmimori/erpnext,gangadharkadam/sher,gangadhar-kadam/verve-erp,hernad/erpnext,Suninus/erpnext,gangadharkadam/v6_erp,pawaranand/phrerp,ThiagoGarciaAlves/erpnext,indictranstech/focal-erpnext,suyashphadtare/vestasi-update-erp,mbauskar/helpdesk-erpnext,rohitwaghchaure/digitales_erpnext,gangadhar-kadam/latestchurcherp,indictranstech/fbd_erpnext,indictranstech/reciphergroup-erpnext,rohitwaghchaure/erpnext_smart,gangadharkadam/v5_erp,gangadharkadam/office_erp,indictranstech/biggift-erpnext,gmarke/erpnext,gangadhar-kadam/hrerp,hatwar/focal-erpnext,Tejal011089/huntercamp_erpnext,indictranstech/phrerp,rohitwaghchaure/GenieManager-erpnext,meisterkleister/erpnext,njmube/erpnext,gangadhar-kadam/verve_erp,gangadharkadam/contributionerp,gangadhar-kadam/smrterp,ShashaQin/erpnext,indictranstech/trufil-erpnext,netfirms/erpnext,treejames/erpnext,gangadharkadam/contributionerp,ThiagoGarciaAlves/erpnext,hatwar/focal-erpnext,gangadharkadam/saloon_erp_install,mbauskar/internal-hr,indictranstech/buyback-erp,indictranstech/trufil-erpnext,indictranstech/focal-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/reciphergroup-erpnext,Tejal011089/digitales_erpnext,gangadhar-kadam/helpdesk-erpnext,suyashphadtare/sajil-final-erp,rohitwaghchaure/erpnext-receipher,indictranstech/focal-erpnext,mbauskar/Das_Erpnext,gangadharkadam/vlinkerp,suyashphadtare/vestasi-erp-1,gangadhar-kadam/hrerp,gangadharkadam/letzerp,pawaranand/phrerp,geekroot/erpnext,hanselke/erpnext-1,rohitwaghchaure/erpnext_smart,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,Tejal011089/fbd_erpnext,mbauskar/phrerp,SPKian/Testing,gsnbng/erpnext,Drooids/erpnext,gangadharkadam/saloon_erp_install,ShashaQin/erpnext,gangadhar-kadam/verve_erp,Tejal011089/digitales_erpnext,rohitwaghchaure/erpnext-receipher,4commerce-technologies-AG/erpnext,saurabh6790/test-erp,mbauskar/sapphire-erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/v6_erp,gangadhar-kadam/laganerp,susuchina/ERPNEXT,gangadhar-kadam/verve_erp,indictranstech/tele-erpnext,rohitwaghchaure/New_Theme_Erp,gangadharkadam/vlinkerp,mbauskar/alec_frappe5_erpnext,fuhongliang/erpnext,hatwar/Das_erpnext,mahabuber/erpnext,suyashphadtare/vestasi-erp-jan-end,gangadhar-kadam/verve-erp,indictranstech/phrerp,Tejal011089/paypal_erpnext,Suninus/erpnext,gangadharkadam/saloon_erp_install,SPKian/Testin
g2,BhupeshGupta/erpnext,indictranstech/internal-erpnext,gangadharkadam/office_erp,suyashphadtare/vestasi-update-erp,rohitwaghchaure/New_Theme_Erp,mbauskar/omnitech-demo-erpnext,gangadharkadam/v4_erp,indictranstech/biggift-erpnext,gangadhar-kadam/verve_erp,indictranstech/tele-erpnext,rohitwaghchaure/digitales_erpnext,indictranstech/internal-erpnext,ShashaQin/erpnext,njmube/erpnext,gangadharkadam/v4_erp,gangadhar-kadam/laganerp,indictranstech/buyback-erp,mbauskar/omnitech-erpnext,pombredanne/erpnext,suyashphadtare/gd-erp,pawaranand/phrerp,sagar30051991/ozsmart-erp,hanselke/erpnext-1,hatwar/buyback-erpnext,sheafferusa/erpnext,Tejal011089/trufil-erpnext,indictranstech/osmosis-erpnext,MartinEnder/erpnext-de,anandpdoshi/erpnext,Drooids/erpnext,gangadharkadam/vlinkerp,meisterkleister/erpnext,gangadharkadam/tailorerp,indictranstech/vestasi-erpnext,gangadharkadam/letzerp,gangadharkadam/saloon_erp,gangadharkadam/johnerp,Tejal011089/paypal_erpnext,gangadharkadam/letzerp,gangadharkadam/contributionerp,hatwar/focal-erpnext,mbauskar/phrerp,mbauskar/internal-hr,tmimori/erpnext,mbauskar/helpdesk-erpnext,hatwar/Das_erpnext,indictranstech/tele-erpnext,4commerce-technologies-AG/erpnext,shitolepriya/test-erp,fuhongliang/erpnext,gangadharkadam/v4_erp,indictranstech/erpnext,gsnbng/erpnext,susuchina/ERPNEXT,Drooids/erpnext,indictranstech/vestasi-erpnext,gangadharkadam/saloon_erp,gangadhar-kadam/helpdesk-erpnext,suyashphadtare/vestasi-erp-final,mbauskar/omnitech-erpnext,gangadharkadam/sher,saurabh6790/test-erp,njmube/erpnext,pombredanne/erpnext,suyashphadtare/sajil-final-erp,rohitwaghchaure/New_Theme_Erp,hatwar/buyback-erpnext,aruizramon/alec_erpnext,gangadharkadam/verveerp,gangadharkadam/smrterp,rohitwaghchaure/erpnext-receipher,SPKian/Testing,gangadharkadam/office_erp,gangadharkadam/tailorerp,pombredanne/erpnext,Tejal011089/digitales_erpnext,SPKian/Testing2,Tejal011089/fbd_erpnext,rohitwaghchaure/New_Theme_Erp,gangadhar-kadam/latestchurcherp,geekroot/erpnext,njmube/erpnext,mbauskar/omnitech-demo-erpnext,mbauskar/phrerp,suyashphadtare/sajil-final-erp,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/letzerp,hatwar/buyback-erpnext,gangadharkadam/saloon_erp,aruizramon/alec_erpnext,BhupeshGupta/erpnext,netfirms/erpnext,Tejal011089/fbd_erpnext,rohitwaghchaure/erpnext_smart,indictranstech/phrerp,rohitwaghchaure/digitales_erpnext,indictranstech/osmosis-erpnext,indictranstech/biggift-erpnext,Tejal011089/osmosis_erpnext,indictranstech/buyback-erp,saurabh6790/test-erp,gangadharkadam/sterp,Tejal011089/trufil-erpnext,suyashphadtare/sajil-erp,mbauskar/internal-hr,indictranstech/fbd_erpnext,dieface/erpnext,gangadharkadam/verveerp,suyashphadtare/vestasi-erp-final,indictranstech/reciphergroup-erpnext,gangadharkadam/v4_erp,mbauskar/Das_Erpnext,gmarke/erpnext,gangadharkadam/saloon_erp,sheafferusa/erpnext,gangadharkadam/v5_erp,hernad/erpnext,treejames/erpnext,saurabh6790/test-erp,netfirms/erpnext,indictranstech/osmosis-erpnext,Suninus/erpnext,rohitwaghchaure/erpnext-receipher,gangadhar-kadam/smrterp,indictranstech/Das_Erpnext,indictranstech/buyback-erp,shitolepriya/test-erp,gangadhar-kadam/verve-erp,mbauskar/sapphire-erpnext,hatwar/Das_erpnext,Tejal011089/osmosis_erpnext,sagar30051991/ozsmart-erp,indictranstech/Das_Erpnext,Tejal011089/paypal_erpnext,4commerce-technologies-AG/erpnext,SPKian/Testing2,gangadhar-kadam/verve_test_erp,indictranstech/reciphergroup-erpnext,Tejal011089/osmosis_erpnext,indictranstech/osmosis-erpnext,shitolepriya/test-erp,indictranstech/trufil-erpnext,rohitwaghchaure/GenieManager-erpnext,gangadharkadam/joh
nerp,gangadhar-kadam/helpdesk-erpnext,rohitwaghchaure/digitales_erpnext,gangadharkadam/vlinkerp,mbauskar/omnitech-demo-erpnext,treejames/erpnext,gangadhar-kadam/verve_test_erp,MartinEnder/erpnext-de,suyashphadtare/gd-erp,shft117/SteckerApp,gangadharkadam/verveerp,gangadharkadam/verveerp,fuhongliang/erpnext,indictranstech/trufil-erpnext,indictranstech/internal-erpnext,sheafferusa/erpnext,indictranstech/biggift-erpnext,gangadharkadam/v6_erp,dieface/erpnext,Aptitudetech/ERPNext,indictranstech/vestasi-erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadhar-kadam/verve_live_erp,mbauskar/alec_frappe5_erpnext,meisterkleister/erpnext,Tejal011089/digitales_erpnext,suyashphadtare/vestasi-erp-1,mbauskar/sapphire-erpnext,mbauskar/sapphire-erpnext,hanselke/erpnext-1,gangadharkadam/sterp,Suninus/erpnext,sagar30051991/ozsmart-erp,gangadhar-kadam/verve_live_erp,gangadhar-kadam/helpdesk-erpnext,indictranstech/fbd_erpnext,mbauskar/helpdesk-erpnext,susuchina/ERPNEXT,mbauskar/phrerp,mahabuber/erpnext,hanselke/erpnext-1,gangadharkadam/contributionerp,mbauskar/Das_Erpnext,indictranstech/erpnext,aruizramon/alec_erpnext,gangadhar-kadam/verve_live_erp,gangadhar-kadam/latestchurcherp,geekroot/erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/fbd_erpnext,Tejal011089/trufil-erpnext,gangadhar-kadam/latestchurcherp,gangadhar-kadam/verve_test_erp,hernad/erpnext,pawaranand/phrerp,tmimori/erpnext,indictranstech/focal-erpnext,suyashphadtare/test,dieface/erpnext,ShashaQin/erpnext,indictranstech/erpnext,indictranstech/Das_Erpnext,gangadharkadam/saloon_erp_install,hatwar/Das_erpnext,gsnbng/erpnext,hatwar/focal-erpnext,Tejal011089/huntercamp_erpnext,shitolepriya/test-erp,sagar30051991/ozsmart-erp,Tejal011089/osmosis_erpnext,shft117/SteckerApp,suyashphadtare/gd-erp,meisterkleister/erpnext,ThiagoGarciaAlves/erpnext,suyashphadtare/sajil-erp,dieface/erpnext,gangadharkadam/v5_erp,suyashphadtare/gd-erp,indictranstech/vestasi-erpnext,mbauskar/helpdesk-erpnext,MartinEnder/erpnext-de
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
for d in (('Asset', 'Debit', 'No'), ('Liability', 'Credit', 'No'), ('Expense', 'Debit', 'Yes'),
('Income', 'Credit', 'Yes')):
frappe.db.sql("""update `tabAccount` set root_type = %s
where debit_or_credit=%s and is_pl_account=%s""", d)
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")Patch to update old accounts property
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
frappe.db.sql("""update `tabAccount` set report_type =
if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
for d in (('Asset', 'Debit', 'No'), ('Liability', 'Credit', 'No'), ('Expense', 'Debit', 'Yes'),
('Income', 'Credit', 'Yes')):
frappe.db.sql("""update `tabAccount` set root_type = %s
where debit_or_credit=%s and is_pl_account=%s""", d)
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")<commit_msg>Patch to update old accounts property<commit_after>
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
frappe.db.sql("""update `tabAccount` set report_type =
if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
for d in (('Asset', 'Debit', 'No'), ('Liability', 'Credit', 'No'), ('Expense', 'Debit', 'Yes'),
('Income', 'Credit', 'Yes')):
frappe.db.sql("""update `tabAccount` set root_type = %s
where debit_or_credit=%s and is_pl_account=%s""", d)
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")Patch to update old accounts property# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
frappe.db.sql("""update `tabAccount` set report_type =
if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
for d in (('Asset', 'Debit', 'No'), ('Liability', 'Credit', 'No'), ('Expense', 'Debit', 'Yes'),
('Income', 'Credit', 'Yes')):
frappe.db.sql("""update `tabAccount` set root_type = %s
where debit_or_credit=%s and is_pl_account=%s""", d)
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")<commit_msg>Patch to update old accounts property<commit_after># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.reload_doc("setup", 'doctype', "company")
frappe.reload_doc("accounts", 'doctype', "account")
frappe.db.sql("""update tabAccount set account_type='Fixed Asset'
where account_type='Fixed Asset Account'""")
frappe.db.sql("""update `tabAccount` set report_type =
if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')""")
frappe.db.sql("""update `tabAccount` set balance_must_be=debit_or_credit
where ifnull(allow_negative_balance, 0) = 0""")
|
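A minimal standalone sketch of the mapping this patch applies: accounts with is_pl_account set to 'Yes' get report_type 'Profit and Loss', everything else 'Balance Sheet'. The sample rows below are made up for illustration; the real data lives in `tabAccount` and is updated by the SQL above, not by this snippet.

def report_type_for(is_pl_account):
    # Mirrors the SQL: if(is_pl_account='Yes', 'Profit and Loss', 'Balance Sheet')
    return 'Profit and Loss' if is_pl_account == 'Yes' else 'Balance Sheet'

sample_accounts = [
    {'name': 'Sales', 'is_pl_account': 'Yes'},
    {'name': 'Fixed Assets', 'is_pl_account': 'No'},
]
for account in sample_accounts:
    account['report_type'] = report_type_for(account['is_pl_account'])
    print('{0} -> {1}'.format(account['name'], account['report_type']))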
eaa99e12ef4b868e825ffe01f4eb9319e439827b
|
examples/face_detection/face_detect.py
|
examples/face_detection/face_detect.py
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[('example', util.download_video())], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(), 'example_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = 'example_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4('example_faces')
print('Successfully generated example_faces.mp4')
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
movie_path = util.download_video() if len(sys.argv) <= 1 else sys.argv[1]
print('Detecting faces in movie {}'.format(movie_path))
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[(movie_name, movie_path)], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(),
movie_name + '_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = movie_name + '_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4(movie_name + '_faces')
print('Successfully generated {:s}_faces.mp4'.format(movie_name))
|
Update face detect example to take a path argument
|
Update face detect example to take a path argument
|
Python
|
apache-2.0
|
scanner-research/scanner,scanner-research/scanner,scanner-research/scanner,scanner-research/scanner
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[('example', util.download_video())], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(), 'example_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = 'example_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4('example_faces')
print('Successfully generated example_faces.mp4')
Update face detect example to take a path argument
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
movie_path = util.download_video() if len(sys.argv) <= 1 else sys.argv[1]
print('Detecting faces in movie {}'.format(movie_path))
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[(movie_name, movie_path)], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(),
movie_name + '_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = movie_name + '_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4(movie_name + '_faces')
print('Successfully generated {:s}_faces.mp4'.format(movie_name))
|
<commit_before>from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[('example', util.download_video())], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(), 'example_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = 'example_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4('example_faces')
print('Successfully generated example_faces.mp4')
<commit_msg>Update face detect example to take a path argument<commit_after>
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
movie_path = util.download_video() if len(sys.argv) <= 1 else sys.argv[1]
print('Detecting faces in movie {}'.format(movie_path))
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[(movie_name, movie_path)], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(),
movie_name + '_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = movie_name + '_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4(movie_name + '_faces')
print('Successfully generated {:s}_faces.mp4'.format(movie_name))
|
from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[('example', util.download_video())], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(), 'example_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = 'example_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4('example_faces')
print('Successfully generated example_faces.mp4')
Update face detect example to take a path argumentfrom scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
movie_path = util.download_video() if len(sys.argv) <= 1 else sys.argv[1]
print('Detecting faces in movie {}'.format(movie_path))
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[(movie_name, movie_path)], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(),
movie_name + '_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = movie_name + '_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4(movie_name + '_faces')
print('Successfully generated {:s}_faces.mp4'.format(movie_name))
|
<commit_before>from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[('example', util.download_video())], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(), 'example_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = 'example_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4('example_faces')
print('Successfully generated example_faces.mp4')
<commit_msg>Update face detect example to take a path argument<commit_after>from scannerpy import Database, DeviceType, Job
from scannerpy.stdlib import pipelines
import subprocess
import cv2
import sys
import os.path
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/..')
import util
movie_path = util.download_video() if len(sys.argv) <= 1 else sys.argv[1]
print('Detecting faces in movie {}'.format(movie_path))
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
with Database() as db:
print('Ingesting video into Scanner ...')
[input_table], _ = db.ingest_videos(
[(movie_name, movie_path)], force=True)
print('Detecting faces...')
bboxes_table = pipelines.detect_faces(
db, input_table, lambda t: t.all(),
movie_name + '_bboxes')
print('Drawing faces onto video...')
frame = input_table.as_op().all()
bboxes = bboxes_table.as_op().all()
out_frame = db.ops.DrawBox(frame = frame, bboxes = bboxes)
job = Job(columns = [out_frame], name = movie_name + '_bboxes_overlay')
out_table = db.run(job, force=True)
out_table.column('frame').save_mp4(movie_name + '_faces')
print('Successfully generated {:s}_faces.mp4'.format(movie_name))
|
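The core of this change is the argv-with-fallback pattern. Here is a self-contained sketch of just that pattern, with download_default_video standing in as a hypothetical placeholder for util.download_video (it is not part of Scanner's API):

import os.path
import sys

def download_default_video():
    # Placeholder for util.download_video(); returns a made-up path.
    return '/tmp/example.mp4'

# Use the first CLI argument if given, otherwise fall back to the default.
movie_path = download_default_video() if len(sys.argv) <= 1 else sys.argv[1]
# Derive a table-name prefix from the file name, as the example does.
movie_name = os.path.splitext(os.path.basename(movie_path))[0]
print('Detecting faces in movie {0} (prefix {1})'.format(movie_path, movie_name))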
421fd2d6fc0ffeaf35a99d426c7a1f9914e1da4e
|
weaveserver/services/plugins/__init__.py
|
weaveserver/services/plugins/__init__.py
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager", "http"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
Add http dependency for plugins.
|
Add http dependency for plugins.
|
Python
|
mit
|
supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
Add http dependency for plugins.
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager", "http"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
<commit_before>from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
<commit_msg>Add http dependency for plugins.<commit_after>
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager", "http"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
Add http dependency for plugins.from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager", "http"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
<commit_before>from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
<commit_msg>Add http dependency for plugins.<commit_after>from .service import PluginService
__meta__ = {
"name": "Plugin Manager",
"class": PluginService,
"deps": ["messaging", "simpledb", "appmanager", "http"],
"config": [
{
"name": "plugins",
"loaders": [
{"type": "env"},
{"type": "sysvarfile"}
]
}
]
}
|
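Adding "http" to deps only helps if the service loader starts dependencies first. Weaveserver's actual resolver is not shown in this record, so the following is an assumption: a minimal sketch of how a registry could derive a startup order from these deps lists.

def startup_order(services):
    # Depth-first walk: every service is appended only after its deps.
    # No cycle detection; fine for a sketch, not for production.
    order, seen = [], set()

    def visit(name):
        if name in seen:
            return
        seen.add(name)
        for dep in services.get(name, ()):
            visit(dep)
        order.append(name)

    for name in services:
        visit(name)
    return order

services = {
    'messaging': (), 'simpledb': (), 'appmanager': (), 'http': (),
    'plugins': ('messaging', 'simpledb', 'appmanager', 'http'),
}
print(startup_order(services))  # 'plugins' always comes after 'http'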
627d79ae4950338c8a5a0d75bae244c9c0374d4a
|
friendlyurls/admin.py
|
friendlyurls/admin.py
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
search_fields = ('friendly_path','content_type__name')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
Allow searching of vanity urls
|
Allow searching of vanity urls
|
Python
|
bsd-3-clause
|
tachang/django_friendlyurls
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
admin.site.register(UrlMapping, UrlMappingAdmin)
Allow searching of vanity urls
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
search_fields = ('friendly_path','content_type__name')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
<commit_before>from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
admin.site.register(UrlMapping, UrlMappingAdmin)
<commit_msg>Allow searching of vanity urls<commit_after>
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
search_fields = ('friendly_path','content_type__name')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
admin.site.register(UrlMapping, UrlMappingAdmin)
Allow searching of vanity urlsfrom django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
search_fields = ('friendly_path','content_type__name')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
<commit_before>from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
admin.site.register(UrlMapping, UrlMappingAdmin)
<commit_msg>Allow searching of vanity urls<commit_after>from django.contrib import admin
from friendlyurls.models import *
class UrlMappingAdmin(admin.ModelAdmin):
list_display = ('friendly_path', 'resolved_url', 'content_type', 'object')
search_fields = ('friendly_path','content_type__name')
admin.site.register(UrlMapping, UrlMappingAdmin)
|
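Roughly what the admin does with these search_fields: a case-insensitive OR across the listed lookups, where the double underscore in content_type__name follows the foreign key into ContentType. A hand-rolled sketch of the equivalent queryset filter (not Django's exact implementation):

from django.db.models import Q

def search_url_mappings(queryset, term):
    # Equivalent of typing `term` into the admin search box for this model.
    return queryset.filter(
        Q(friendly_path__icontains=term) |
        Q(content_type__name__icontains=term)
    )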
d0126b7e31c69ea7dd5cda4b9a3e931f5b8a8fbf
|
rest_framework/authtoken/views.py
|
rest_framework/authtoken/views.py
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
Set serializer_class on ObtainAuthToken view
|
Set serializer_class on ObtainAuthToken view
|
Python
|
bsd-2-clause
|
kennydude/django-rest-framework,mgaitan/django-rest-framework,wedaly/django-rest-framework,arpheno/django-rest-framework,callorico/django-rest-framework,johnraz/django-rest-framework,nryoung/django-rest-framework,krinart/django-rest-framework,gregmuellegger/django-rest-framework,akalipetis/django-rest-framework,tigeraniya/django-rest-framework,ebsaral/django-rest-framework,cheif/django-rest-framework,jpulec/django-rest-framework,ajaali/django-rest-framework,brandoncazander/django-rest-framework,leeahoward/django-rest-framework,atombrella/django-rest-framework,jtiai/django-rest-framework,fishky/django-rest-framework,kgeorgy/django-rest-framework,gregmuellegger/django-rest-framework,ambivalentno/django-rest-framework,tomchristie/django-rest-framework,lubomir/django-rest-framework,abdulhaq-e/django-rest-framework,johnraz/django-rest-framework,d0ugal/django-rest-framework,iheitlager/django-rest-framework,justanr/django-rest-framework,jtiai/django-rest-framework,simudream/django-rest-framework,sehmaschine/django-rest-framework,fishky/django-rest-framework,rubendura/django-rest-framework,kylefox/django-rest-framework,rafaelang/django-rest-framework,ashishfinoit/django-rest-framework,simudream/django-rest-framework,rhblind/django-rest-framework,callorico/django-rest-framework,uruz/django-rest-framework,jpadilla/django-rest-framework,andriy-s/django-rest-framework,jerryhebert/django-rest-framework,hunter007/django-rest-framework,jpulec/django-rest-framework,elim/django-rest-framework,kgeorgy/django-rest-framework,wzbozon/django-rest-framework,callorico/django-rest-framework,linovia/django-rest-framework,ticosax/django-rest-framework,wzbozon/django-rest-framework,wwj718/django-rest-framework,HireAnEsquire/django-rest-framework,maryokhin/django-rest-framework,HireAnEsquire/django-rest-framework,damycra/django-rest-framework,jness/django-rest-framework,jpadilla/django-rest-framework,agconti/django-rest-framework,justanr/django-rest-framework,werthen/django-rest-framework,wangpanjun/django-rest-framework,cheif/django-rest-framework,mgaitan/django-rest-framework,kylefox/django-rest-framework,sbellem/django-rest-framework,zeldalink0515/django-rest-framework,tomchristie/django-rest-framework,justanr/django-rest-framework,qsorix/django-rest-framework,delinhabit/django-rest-framework,cyberj/django-rest-framework,brandoncazander/django-rest-framework,yiyocx/django-rest-framework,lubomir/django-rest-framework,tcroiset/django-rest-framework,kgeorgy/django-rest-framework,edx/django-rest-framework,ebsaral/django-rest-framework,dmwyatt/django-rest-framework,brandoncazander/django-rest-framework,yiyocx/django-rest-framework,uploadcare/django-rest-framework,potpath/django-rest-framework,waytai/django-rest-framework,damycra/django-rest-framework,agconti/django-rest-framework,AlexandreProenca/django-rest-framework,alacritythief/django-rest-framework,ebsaral/django-rest-framework,akalipetis/django-rest-framework,douwevandermeij/django-rest-framework,James1345/django-rest-framework,sehmaschine/django-rest-framework,potpath/django-rest-framework,uruz/django-rest-framework,jerryhebert/django-rest-framework,delinhabit/django-rest-framework,hunter007/django-rest-framework,davesque/django-rest-framework,leeahoward/django-rest-framework,thedrow/django-rest-framework-1,edx/django-rest-framework,maryokhin/django-rest-framework,linovia/django-rest-framework,thedrow/django-rest-framework-1,abdulhaq-e/django-rest-framework,ticosax/django-rest-framework,kylefox/django-rest-framework,jerryhebert/django-rest-framework,wangpanjun/dja
ngo-rest-framework,iheitlager/django-rest-framework,yiyocx/django-rest-framework,dmwyatt/django-rest-framework,douwevandermeij/django-rest-framework,potpath/django-rest-framework,canassa/django-rest-framework,canassa/django-rest-framework,arpheno/django-rest-framework,d0ugal/django-rest-framework,d0ugal/django-rest-framework,hnakamur/django-rest-framework,rhblind/django-rest-framework,rafaelcaricio/django-rest-framework,kezabelle/django-rest-framework,thedrow/django-rest-framework-1,bluedazzle/django-rest-framework,waytai/django-rest-framework,ambivalentno/django-rest-framework,krinart/django-rest-framework,damycra/django-rest-framework,andriy-s/django-rest-framework,raphaelmerx/django-rest-framework,VishvajitP/django-rest-framework,rafaelcaricio/django-rest-framework,uploadcare/django-rest-framework,rafaelcaricio/django-rest-framework,cyberj/django-rest-framework,ossanna16/django-rest-framework,paolopaolopaolo/django-rest-framework,rubendura/django-rest-framework,sbellem/django-rest-framework,aericson/django-rest-framework,wwj718/django-rest-framework,werthen/django-rest-framework,krinart/django-rest-framework,VishvajitP/django-rest-framework,HireAnEsquire/django-rest-framework,uruz/django-rest-framework,kezabelle/django-rest-framework,abdulhaq-e/django-rest-framework,hnarayanan/django-rest-framework,delinhabit/django-rest-framework,akalipetis/django-rest-framework,aericson/django-rest-framework,tomchristie/django-rest-framework,ossanna16/django-rest-framework,ezheidtmann/django-rest-framework,tigeraniya/django-rest-framework,hnakamur/django-rest-framework,buptlsl/django-rest-framework,vstoykov/django-rest-framework,tcroiset/django-rest-framework,nryoung/django-rest-framework,ambivalentno/django-rest-framework,rhblind/django-rest-framework,raphaelmerx/django-rest-framework,vstoykov/django-rest-framework,jpadilla/django-rest-framework,bluedazzle/django-rest-framework,paolopaolopaolo/django-rest-framework,paolopaolopaolo/django-rest-framework,zeldalink0515/django-rest-framework,MJafarMashhadi/django-rest-framework,antonyc/django-rest-framework,MJafarMashhadi/django-rest-framework,davesque/django-rest-framework,raphaelmerx/django-rest-framework,ticosax/django-rest-framework,sehmaschine/django-rest-framework,xiaotangyuan/django-rest-framework,hunter007/django-rest-framework,YBJAY00000/django-rest-framework,iheitlager/django-rest-framework,simudream/django-rest-framework,hnakamur/django-rest-framework,xiaotangyuan/django-rest-framework,rubendura/django-rest-framework,rafaelang/django-rest-framework,wwj718/django-rest-framework,tigeraniya/django-rest-framework,ajaali/django-rest-framework,jness/django-rest-framework,MJafarMashhadi/django-rest-framework,ashishfinoit/django-rest-framework,cheif/django-rest-framework,adambain-vokal/django-rest-framework,ezheidtmann/django-rest-framework,canassa/django-rest-framework,sheppard/django-rest-framework,YBJAY00000/django-rest-framework,johnraz/django-rest-framework,ezheidtmann/django-rest-framework,lubomir/django-rest-framework,andriy-s/django-rest-framework,davesque/django-rest-framework,uploadcare/django-rest-framework,YBJAY00000/django-rest-framework,buptlsl/django-rest-framework,rafaelang/django-rest-framework,ossanna16/django-rest-framework,atombrella/django-rest-framework,douwevandermeij/django-rest-framework,sheppard/django-rest-framework,VishvajitP/django-rest-framework,sheppard/django-rest-framework,werthen/django-rest-framework,sbellem/django-rest-framework,nhorelik/django-rest-framework,kezabelle/django-rest-framework,jpulec/django-rest-framewo
rk,mgaitan/django-rest-framework,jtiai/django-rest-framework,alacritythief/django-rest-framework,qsorix/django-rest-framework,kennydude/django-rest-framework,gregmuellegger/django-rest-framework,jness/django-rest-framework,vstoykov/django-rest-framework,fishky/django-rest-framework,nhorelik/django-rest-framework,James1345/django-rest-framework,alacritythief/django-rest-framework,elim/django-rest-framework,ajaali/django-rest-framework,elim/django-rest-framework,antonyc/django-rest-framework,nryoung/django-rest-framework,ashishfinoit/django-rest-framework,wzbozon/django-rest-framework,AlexandreProenca/django-rest-framework,maryokhin/django-rest-framework,pombredanne/django-rest-framework,edx/django-rest-framework,leeahoward/django-rest-framework,buptlsl/django-rest-framework,pombredanne/django-rest-framework,pombredanne/django-rest-framework,aericson/django-rest-framework,linovia/django-rest-framework,adambain-vokal/django-rest-framework,hnarayanan/django-rest-framework,bluedazzle/django-rest-framework,xiaotangyuan/django-rest-framework,agconti/django-rest-framework,James1345/django-rest-framework,qsorix/django-rest-framework,cyberj/django-rest-framework,dmwyatt/django-rest-framework,zeldalink0515/django-rest-framework,atombrella/django-rest-framework,arpheno/django-rest-framework,kennydude/django-rest-framework,tcroiset/django-rest-framework,waytai/django-rest-framework,AlexandreProenca/django-rest-framework,wangpanjun/django-rest-framework,nhorelik/django-rest-framework,adambain-vokal/django-rest-framework,antonyc/django-rest-framework,hnarayanan/django-rest-framework,wedaly/django-rest-framework,wedaly/django-rest-framework
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
Set serializer_class on ObtainAuthToken view
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
<commit_before>from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
<commit_msg>Set serializer_class on ObtainAuthToken view<commit_after>
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
Set serializer_class on ObtainAuthToken viewfrom rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
<commit_before>from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
def post(self, request):
serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
<commit_msg>Set serializer_class on ObtainAuthToken view<commit_after>from rest_framework.views import APIView
from rest_framework import parsers
from rest_framework import renderers
from rest_framework.response import Response
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
class ObtainAuthToken(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
def post(self, request):
serializer = self.serializer_class(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
token, created = Token.objects.get_or_create(user=user)
return Response({'token': token.key})
obtain_auth_token = ObtainAuthToken.as_view()
|
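Lifting the serializer into serializer_class means subclasses can swap it without copying post(). A sketch of that, where EmailAuthTokenSerializer and its import path are hypothetical stand-ins for a project-specific serializer:

from rest_framework.authtoken.views import ObtainAuthToken

from myproject.serializers import EmailAuthTokenSerializer  # hypothetical

class ObtainAuthTokenByEmail(ObtainAuthToken):
    # post() is inherited unchanged; only the serializer is replaced.
    serializer_class = EmailAuthTokenSerializer

obtain_auth_token_by_email = ObtainAuthTokenByEmail.as_view()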
4393740af93ae0ac1927e68c422e24735b0216c1
|
infosystem/subsystem/policy/entity.py
|
infosystem/subsystem/policy/entity.py
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id, bypass):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id=None, bypass=False):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
Make role_id & bypass opt args in Policy __init__
|
Make role_id & bypass opt args in Policy __init__
|
Python
|
apache-2.0
|
samueldmq/infosystem
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id, bypass):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
Make role_id & bypass opt args in Policy __init__
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id=None, bypass=False):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
<commit_before>from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id, bypass):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
<commit_msg>Make role_id & bypass opt args in Policy __init__<commit_after>
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id=None, bypass=False):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id, bypass):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
Make role_id & bypass opt args in Policy __init__from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id=None, bypass=False):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
<commit_before>from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id, bypass):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
<commit_msg>Make role_id & bypass opt args in Policy __init__<commit_after>from sqlalchemy import UniqueConstraint
from infosystem.common.subsystem import entity
from infosystem.database import db
class Policy(entity.Entity, db.Model):
attributes = ['id', 'capability_id', 'role_id', 'bypass']
domain_id = db.Column(db.CHAR(32), db.ForeignKey("domain.id"), nullable=False)
capability_id = db.Column(db.CHAR(32), db.ForeignKey("capability.id"), nullable=False)
role_id = db.Column(db.CHAR(32), db.ForeignKey("role.id"), nullable=True)
bypass = db.Column(db.Boolean, nullable=False, default=False)
__table_args__ = (UniqueConstraint('domain_id', 'capability_id', 'role_id', name='policy_uk'),)
def __init__(self, id, domain_id, capability_id, role_id=None, bypass=False):
super(Policy, self).__init__(id)
self.domain_id = domain_id
self.capability_id = capability_id
self.role_id = role_id
self.bypass = bypass
|
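With the new defaults, callers that want a plain, role-less policy can drop the last two arguments. A usage sketch; the 32-character IDs are made up, and this assumes the app's database layer is already initialized:

from infosystem.subsystem.policy.entity import Policy

policy = Policy(
    id='a' * 32,
    domain_id='b' * 32,
    capability_id='c' * 32,
)
assert policy.role_id is None and policy.bypass is False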
e99a4aa3fab84e112f5d82eafe9012f7e2be9447
|
problem-static/Intro-Eval_50/admin/eval.py
|
problem-static/Intro-Eval_50/admin/eval.py
|
#!/usr/bin/python2.7
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
def main():
print "Hi, welcome to the flag database. We are under construction right now, so you cannot view the flags, or do anything."
while True:
command = raw_input("What would you like to do? ")
try:
result = eval(command)
print "Here is the result of your command: %s" %(result)
except:
print "Invalid command, try again"
main()
|
#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()
|
Fix Intro Eval with Unbuffered Streams
|
Fix Intro Eval with Unbuffered Streams
|
Python
|
mit
|
james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF
|
#!/usr/bin/python2.7
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
def main():
print "Hi, welcome to the flag database. We are under construction right now, so you cannot view the flags, or do anything."
while True:
command = raw_input("What would you like to do? ")
try:
result = eval(command)
print "Here is the result of your command: %s" %(result)
except:
print "Invalid command, try again"
main()
Fix Intro Eval with Unbuffered Streams
|
#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()
|
<commit_before>#!/usr/bin/python2.7
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
def main():
print "Hi, welcome to the flag database. We are under construction right now, so you cannot view the flags, or do anything."
while True:
command = raw_input("What would you like to do? ")
try:
result = eval(command)
print "Here is the result of your command: %s" %(result)
except:
print "Invalid command, try again"
main()
<commit_msg>Fix Intro Eval with Unbuffered Streams<commit_after>
|
#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()
|
#!/usr/bin/python2.7
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
def main():
print "Hi, welcome to the flag database. We are under construction right now, so you cannot view the flags, or do anything."
while True:
command = raw_input("What would you like to do? ")
try:
result = eval(command)
print "Here is the result of your command: %s" %(result)
except:
print "Invalid command, try again"
main()
Fix Intro Eval with Unbuffered Streams#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()
|
<commit_before>#!/usr/bin/python2.7
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
def main():
print "Hi, welcome to the flag database. We are under construction right now, so you cannot view the flags, or do anything."
while True:
command = raw_input("What would you like to do? ")
try:
result = eval(command)
print "Here is the result of your command: %s" %(result)
except:
print "Invalid command, try again"
main()
<commit_msg>Fix Intro Eval with Unbuffered Streams<commit_after>#!/usr/bin/python2.7
import sys
del __builtins__.__dict__['__import__']
del __builtins__.__dict__['reload']
flag = "eval_is_fun"
class UnbufferedStream(object):
def __init__(self, stream):
self.stream = stream
def write(self, data):
self.stream.write(data)
self.stream.flush()
def __getattr__(self, attr):
return getattr(self.stream, attr)
sys.stdout = UnbufferedStream(sys.stdout)
def main():
while True:
print "Welcome to the flag database! We are currently under construction. Please do not hack the flags."
try:
command = str(raw_input("What would you like to do? "))
result = str(eval(command))
print "This is the result: %s" %(result)
except Exception, e:
print "Invalid command!!!! EXITING!!!!!"
return
main()
|
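The fix in this record works because eval-style CTF services usually talk to players over a socket, where block-buffered stdout holds the prompt back until the buffer fills; wrapping sys.stdout so every write() is followed by flush() makes prompts appear immediately. A Python 3 rendering of the same idea (the record itself is Python 2):

import sys

class UnbufferedStream:
    """Proxy stream that flushes after every write, as in the fix above."""
    def __init__(self, stream):
        self.stream = stream

    def write(self, data):
        self.stream.write(data)
        self.stream.flush()

    def __getattr__(self, attr):
        # Delegate everything else (encoding, fileno, ...) to the real stream
        return getattr(self.stream, attr)

sys.stdout = UnbufferedStream(sys.stdout)
print("prompt reaches the client immediately")

Running the interpreter with python -u, or calling print(..., flush=True), achieves the same effect without a wrapper class.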
bdeb60d5e82e5eaaaaf805286bae29e9112af307
|
us_ignite/common/management/commands/common_load_fixtures.py
|
us_ignite/common/management/commands/common_load_fixtures.py
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Done!"
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
|
Add initial data for the ``Interest`` model.
|
Add initial data for the ``Interest`` model.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Done!"
Add initial data for the ``Interest`` model.
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
|
<commit_before>import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Done!"
<commit_msg>Add initial data for the ``Interest`` model.<commit_after>
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
|
import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Done!"
Add initial data for the ``Interest`` model.import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
|
<commit_before>import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Done!"
<commit_msg>Add initial data for the ``Interest`` model.<commit_after>import urlparse
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.sites.models import Site
from us_ignite.profiles.models import Interest
INTEREST_LIST = (
('SDN', 'sdn'),
('OpenFlow', 'openflow'),
('Ultra fast', 'ultra-fast'),
('Advanced wireless', 'advanced-wireless'),
('Low-latency', 'low-latency'),
('Local cloud/edge computing', 'local-cloud-edge-computing'),
('Healthcare', 'healthcare'),
('Education & Workforce development', 'education-workforce-development'),
('Energy', 'energy'),
('Transportation', 'transportation'),
('Advanced Manufacturing', 'advanced-manufacturing'),
('Public Safety', 'public-safety'),
)
class Command(BaseCommand):
def handle(self, *args, **options):
parsed_url = urlparse.urlparse(settings.SITE_URL)
Site.objects.all().update(domain=parsed_url.netloc,
name=parsed_url.netloc)
print "Updated site URL."
for name, slug in INTEREST_LIST:
interest, is_new = (Interest.objects
.get_or_create(name=name, slug=slug))
if is_new:
print u'Imported interest: %s' % interest
|
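The pattern this record adds — looping over a constant list and calling get_or_create for each entry — makes the management command idempotent: re-running it only reports rows that did not exist yet. A framework-free stand-in for Django's get_or_create that illustrates the property (the dict store is an assumption; the real code persists Interest rows):

INTERESTS = {}

def get_or_create(name, slug):
    """Return (record, is_new); create only when the slug is unseen."""
    if slug in INTERESTS:
        return INTERESTS[slug], False
    INTERESTS[slug] = {'name': name, 'slug': slug}
    return INTERESTS[slug], True

for name, slug in (('SDN', 'sdn'), ('OpenFlow', 'openflow')):
    interest, is_new = get_or_create(name, slug)
    if is_new:
        print('Imported interest: %s' % interest['name'])
# A second pass over the same list creates nothing and prints nothing.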
4ce3a0d32c3b1399523483257ad1922705cb3dc9
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.3",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
Change version to 1.0.3 (dev)
|
Change version to 1.0.3 (dev)
|
Python
|
agpl-3.0
|
xcgd/account_credit_transfer
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.3 (dev)
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.3",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.3 (dev)<commit_after>
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.3",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.3 (dev)# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.3",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.3 (dev)<commit_after># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.3",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config links a bank with a parser
A credit transfer parser links a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
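The change in this record is a hand-edited bump of the manifest's patch component (1.0.2 -> 1.0.3). For illustration only, a hypothetical helper that applies the same mechanical rule to a dotted version string (no such helper exists in the record):

def bump_patch(version):
    """Increment the final numeric component of a dotted version string."""
    parts = version.split('.')
    parts[-1] = str(int(parts[-1]) + 1)
    return '.'.join(parts)

assert bump_patch('1.0.2') == '1.0.3'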
e73795b8ad016bba5b1ab5118a5153085a1e99b0
|
nova/tests/functional/api_sample_tests/test_servers_ips.py
|
nova/tests/functional/api_sample_tests/test_servers_ips.py
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
Make it obvious where we're getting our names from
|
trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <492121341a95b3c3aab646bed44634f739dd019b@redhat.com>
|
Python
|
apache-2.0
|
mahak/nova,mahak/nova,klmitch/nova,klmitch/nova,klmitch/nova,rahulunair/nova,mahak/nova,rahulunair/nova,openstack/nova,openstack/nova,rahulunair/nova,openstack/nova,klmitch/nova
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <492121341a95b3c3aab646bed44634f739dd019b@redhat.com>
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
<commit_before># Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
<commit_msg>trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <492121341a95b3c3aab646bed44634f739dd019b@redhat.com><commit_after>
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <492121341a95b3c3aab646bed44634f739dd019b@redhat.com># Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
<commit_before># Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
uuid = self._post_server()
response = self._do_get('servers/%s/ips/private' % uuid)
self._verify_response('server-ips-network-resp', {}, response, 200)
<commit_msg>trivial: Make it obvious where we're getting our names from
Change-Id: Ib9aa790c8999e50a2a3587561604ff1e51666f38
Signed-off-by: Stephen Finucane <492121341a95b3c3aab646bed44634f739dd019b@redhat.com><commit_after># Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.tests import fixtures as nova_fixtures
from nova.tests.functional.api_sample_tests import test_servers
class ServersIpsJsonTest(test_servers.ServersSampleBase):
sample_dir = 'server-ips'
def test_get(self):
# Test getting a server's IP information.
uuid = self._post_server()
response = self._do_get('servers/%s/ips' % uuid)
self._verify_response('server-ips-resp', {}, response, 200)
def test_get_by_network(self):
# Test getting a server's IP information by network id.
server_uuid = self._post_server()
network_label = nova_fixtures.NeutronFixture.network_1['name']
response = self._do_get('servers/%s/ips/%s' % (
server_uuid, network_label))
self._verify_response('server-ips-network-resp', {}, response, 200)
|
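The refactor in this record replaces the hard-coded network name 'private' with a value read off the NeutronFixture, so the test keeps passing if the fixture's default network ever changes. A simplified sketch of the lookup; the class body here is a hypothetical stand-in, since only network_1['name'] is visible in the record:

class NeutronFixtureSketch:
    # Stand-in for nova.tests.fixtures.NeutronFixture; only the field used above
    network_1 = {'name': 'private'}

server_uuid = 'abc123'  # placeholder for the uuid returned by self._post_server()
network_label = NeutronFixtureSketch.network_1['name']
print('servers/%s/ips/%s' % (server_uuid, network_label))  # servers/abc123/ips/private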
9708e6d01835f597460967b9da47fa4c6d32a907
|
test/goldstandard/benchmark_confirm.py
|
test/goldstandard/benchmark_confirm.py
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
|
Set up reading lines from file, print count for testing purposes
|
Set up reading lines from file, print count for testing purposes
|
Python
|
bsd-2-clause
|
yngcan/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
Set up reading lines from file, print count for testing purposes
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
|
<commit_before>import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
<commit_msg>Set up reading lines from file, print count for testing purposes<commit_after>
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
|
import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
Set up reading lines from file, print count for testing purposesimport sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
|
<commit_before>import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
<commit_msg>Set up reading lines from file, print count for testing purposes<commit_after>import sqlite3 as sql
import os
import sys
import logging
import benchmark
# bmVerify(['final_r7', 'final_r8'], filepath="/home/ysun/disambig/newcode/all/", outdir = "/home/ayu/results_v2/")
# Text Files
txt_file = 'benchmark_errors.txt'
opened_file = open(txt_file, 'U')
log_file = 'benchmark_results.log'
# Logging
logging.basicConfig(filename=log_file, level=logging.DEBUG)
open(log_file, "w")
# Set Up SQL Connections
con = sql.connect('invnum_N_zardoz_with_invpat.sqlite3')
with con:
con_cur = con.cursor()
logging.info("Beginning to query database")
con_cur.execute("CREATE INDEX IF NOT EXISTS index_invnum ON invpat (Invnum)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_lastname ON invpat (Lastname)");
con_cur.execute("CREATE INDEX IF NOT EXISTS index_firstname ON invpat (Firstname)");
count = 0
errors = 0
success = 0
while True:
line_read = opened_file.readline()
# print line_read
if not line_read:
print "EXITING"
break
count = count + 1
if count%100 == 0:
print "starting patent", count
|
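The loop this record adds reads the error file line by line, breaking on EOF and printing a progress marker every 100 lines. In Python 3 the same bookkeeping is usually written with enumerate over the lines themselves; a runnable sketch with an in-memory stand-in for benchmark_errors.txt:

lines = ['patent %d\n' % i for i in range(250)]  # stand-in for the open file handle
for count, line in enumerate(lines, start=1):
    if count % 100 == 0:
        print('starting patent', count)  # progress marker, as in the record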
943d66dc04ec74a27ae12c46ae1559e585faa716
|
scripts/master/factory/dart/channels.py
|
scripts/master/factory/dart/channels.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.10', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
Make stable builder pull from 1.10
|
Make stable builder pull from 1.10
R=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1107673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294974 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Make stable builder pull from 1.10
R=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1107673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294974 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.10', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Make stable builder pull from 1.10
R=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1107673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294974 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.10', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
Make stable builder pull from 1.10
R=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1107673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294974 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.10', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.9', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
<commit_msg>Make stable builder pull from 1.10
R=kasperl@google.com
BUG=
Review URL: https://codereview.chromium.org/1107673002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@294974 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class Channel(object):
def __init__(self, name, branch, position, category_postfix, priority):
self.branch = branch
self.builder_postfix = '-' + name
self.category_postfix = category_postfix
self.name = name
self.position = position
self.priority = priority
self.all_deps_path = '/' + branch + '/deps/all.deps'
self.standalone_deps_path = '/' + branch + '/deps/standalone.deps'
self.dartium_deps_path = '/' + branch + '/deps/dartium.deps'
# The channel names are replicated in the slave.cfg files for all
# dart waterfalls. If you change anything here please also change it there.
CHANNELS = [
Channel('be', 'branches/bleeding_edge', 0, '', 4),
Channel('dev', 'trunk', 1, '-dev', 2),
Channel('stable', 'branches/1.10', 2, '-stable', 1),
Channel('integration', 'branches/dartium_integration', 3, '-integration', 3),
]
CHANNELS_BY_NAME = {}
for c in CHANNELS:
CHANNELS_BY_NAME[c.name] = c
|
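The trailing loop in channels.py builds a name-to-channel index so callers can resolve a channel in O(1). The same indexing step written as a dict comprehension, with simplified (name, priority) tuples standing in for the Channel objects:

CHANNELS = [('be', 4), ('dev', 2), ('stable', 1), ('integration', 3)]
CHANNELS_BY_NAME = {name: priority for name, priority in CHANNELS}
assert CHANNELS_BY_NAME['stable'] == 1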
4e5ef4a04fd0b3b354b187ee6e8e8ef27337ad6f
|
xclib/dbmops.py
|
xclib/dbmops.py
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)]))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)], 'illegal'))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
Allow dumping illegal utf-8 contents
|
Allow dumping illegal utf-8 contents
|
Python
|
mit
|
jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth,jsxc/xmpp-cloud-auth
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)]))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
Allow dumping illegal utf-8 contents
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)], 'illegal'))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
<commit_before>import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)]))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
<commit_msg>Allow dumping illegal utf-8 contents<commit_after>
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)], 'illegal'))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)]))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
Allow dumping illegal utf-8 contentsimport sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)], 'illegal'))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
<commit_before>import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)]))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k), unutf8(domain_db[k])))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
<commit_msg>Allow dumping illegal utf-8 contents<commit_after>import sys
import bsddb3
from xclib.utf8 import utf8, unutf8
def perform(args):
domain_db = bsddb3.hashopen(args.domain_db, 'c', 0o600)
if args.get:
print(unutf8(domain_db[utf8(args.get)], 'illegal'))
elif args.put:
domain_db[utf8(args.put[0])] = args.put[1]
elif args.delete:
del domain_db[utf8(args.delete)]
elif args.unload:
for k in list(domain_db.keys()):
print('%s\t%s' % (unutf8(k, 'illegal'), unutf8(domain_db[k], 'illegal')))
# Should work according to documentation, but doesn't
# for k, v in DOMAIN_DB.iteritems():
# print k, '\t', v
elif args.load:
for line in sys.stdin:
k, v = line.rstrip('\r\n').split('\t', 1)
domain_db[utf8(k)] = v
domain_db.close()
# vim: tabstop=8 softtabstop=0 expandtab shiftwidth=4
|
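The 'illegal' mode added to unutf8() in the record above is not defined here; a plausible self-contained sketch is below, modeling 'illegal' as Python's errors='replace'. That mapping is an assumption for illustration; the real xclib.utf8 helpers may behave differently.
def utf8(s):
    # Encode str to bytes; pass bytes through unchanged.
    return s.encode('utf-8') if isinstance(s, str) else s
def unutf8(b, mode='strict'):
    # 'illegal' is modeled here as lossy decoding via errors='replace'.
    errors = 'replace' if mode == 'illegal' else 'strict'
    return b.decode('utf-8', errors=errors) if isinstance(b, (bytes, bytearray)) else b
print(unutf8(utf8('café')))           # round-trips cleanly
print(unutf8(b'caf\xe9', 'illegal'))  # the stray 0xE9 byte becomes U+FFFD instead of raising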
53171f75a64a26dcec91facbdec95b2ed7f74338
|
ironic/drivers/drac.py
|
ironic/drivers/drac.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
Add the PXE VendorPassthru interface to PXEDracDriver
|
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API, the DRAC driver can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
(cherry picked from commit 78ec7d5336eb65ff845da7ea9f93d34b402f5a0f)
|
Python
|
apache-2.0
|
SauloAislan/ironic,dims/ironic,NaohiroTamura/ironic,hpproliant/ironic,bacaldwell/ironic,naterh/ironic,redhat-openstack/ironic,NaohiroTamura/ironic,ionutbalutoiu/ironic,SauloAislan/ironic,dims/ironic,openstack/ironic,Tan0/ironic,openstack/ironic,pshchelo/ironic,debayanray/ironic_backup,ionutbalutoiu/ironic,devananda/ironic,pshchelo/ironic,bacaldwell/ironic
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API, the DRAC driver can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
(cherry picked from commit 78ec7d5336eb65ff845da7ea9f93d34b402f5a0f)
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
<commit_msg>Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API, the DRAC driver can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
(cherry picked from commit 78ec7d5336eb65ff845da7ea9f93d34b402f5a0f)<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API, the DRAC driver can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
(cherry picked from commit 78ec7d5336eb65ff845da7ea9f93d34b402f5a0f)#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
<commit_before>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
<commit_msg>Add the PXE VendorPassthru interface to PXEDracDriver
Without the PXE VendorPassthru interface to expose the "pass_deploy_info"
method in the vendor_passthru endpoint of the API, the DRAC driver can't
continue the deployment after the ramdisk is booted.
Closes-Bug: #1379705
Change-Id: I21042cbb95a486742abfcb430471d01cd73b3a4a
(cherry picked from commit 78ec7d5336eb65ff845da7ea9f93d34b402f5a0f)<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
DRAC Driver for remote system management using Dell Remote Access Card.
"""
from oslo.utils import importutils
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.drac import management
from ironic.drivers.modules.drac import power
from ironic.drivers.modules import pxe
class PXEDracDriver(base.BaseDriver):
"""Drac driver using PXE for deploy."""
def __init__(self):
if not importutils.try_import('pywsman'):
raise exception.DriverLoadError(
driver=self.__class__.__name__,
reason=_('Unable to import pywsman library'))
self.power = power.DracPower()
self.deploy = pxe.PXEDeploy()
self.management = management.DracManagement()
self.vendor = pxe.VendorPassthru()
|
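The DRAC record above leans on importutils.try_import to fail fast when an optional dependency is missing. A self-contained sketch of that guard pattern, with DriverLoadError and FakeDriver as stubs of mine rather than the real ironic classes:
import importlib
class DriverLoadError(Exception):
    """Stub standing in for ironic.common.exception.DriverLoadError."""
def try_import(name):
    # Return the module if importable, else None (the oslo importutils contract).
    try:
        return importlib.import_module(name)
    except ImportError:
        return None
class FakeDriver(object):
    def __init__(self):
        if not try_import('pywsman'):
            raise DriverLoadError('Unable to import pywsman library')
try:
    FakeDriver()
except DriverLoadError as exc:
    print('driver refused to load:', exc)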
84a2ece10b0e246564fd539eed119f46d44ca74d
|
tests/no_hadoop_bare_image_provider.py
|
tests/no_hadoop_bare_image_provider.py
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, 'nohadoop')
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
import re
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
# encode base image name into name of created test image, to prevent image name clash.
decoration = 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", BASE_IMAGE_NAME)
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, decoration)
|
Use bare image name dependent on base image name
|
Use bare image name dependent on base image name
|
Python
|
apache-2.0
|
prestodb/presto-admin,prestodb/presto-admin
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, 'nohadoop')
Use bare image name dependent on base image name
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
import re
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
# encode base image name into name of created test image, to prevent image name clash.
decoration = 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", BASE_IMAGE_NAME)
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, decoration)
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, 'nohadoop')
<commit_msg>Use bare image name dependent on base image name<commit_after>
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
import re
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
# encode base image name into name of created test image, to prevent image name clash.
decoration = 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", BASE_IMAGE_NAME)
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, decoration)
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, 'nohadoop')
Use bare image name dependent on base image name# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
import re
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
# encode base image name into name of created test image, to prevent image name clash.
decoration = 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", BASE_IMAGE_NAME)
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, decoration)
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, 'nohadoop')
<commit_msg>Use bare image name dependent on base image name<commit_after># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides bare images for standalone clusters.
"""
import re
from tests.bare_image_provider import TagBareImageProvider
from tests.product.constants import BASE_IMAGES_TAG
from tests.product.constants import BASE_IMAGE_NAME
class NoHadoopBareImageProvider(TagBareImageProvider):
def __init__(self):
# encode base image name into name of created test image, to prevent image name clash.
decoration = 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", BASE_IMAGE_NAME)
super(NoHadoopBareImageProvider, self).__init__(
BASE_IMAGE_NAME, BASE_IMAGE_NAME,
BASE_IMAGES_TAG, decoration)
|
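The presto-admin record above avoids tag clashes by folding the base image name into the decoration. The regex idea in isolation, with made-up image names:
import re
def decorate(base_image_name):
    # Every non-alphanumeric character becomes '_', so distinct base images
    # always map to distinct decorated test-image names.
    return 'nohadoop_' + re.sub(r"[^A-Za-z0-9]", "_", base_image_name)
print(decorate('teradatalabs/centos6-ssh'))  # nohadoop_teradatalabs_centos6_ssh
print(decorate('ubuntu:16.04'))              # nohadoop_ubuntu_16_04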
c671301e29e41b0ea7988bad0a5ff4793890ea64
|
readKanjiFileTest.py
|
readKanjiFileTest.py
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items())[0:15]:
print(entry)
if __name__ == '__main__':
main()
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items()):
print(entry)
if __name__ == '__main__':
main()
|
Test now prints out all kanji
|
Test now prints out all kanji
|
Python
|
mit
|
WilliamRayJohnson/kanjiStudier
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items())[0:15]:
print(entry)
if __name__ == '__main__':
main()
Test now prints out all kanji
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items()):
print(entry)
if __name__ == '__main__':
main()
|
<commit_before>from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items())[0:15]:
print(entry)
if __name__ == '__main__':
main()
<commit_msg>Test now prints out all kanji<commit_after>
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items()):
print(entry)
if __name__ == '__main__':
main()
|
from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items())[0:15]:
print(entry)
if __name__ == '__main__':
main()
Test now prints out all kanjifrom readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items()):
print(entry)
if __name__ == '__main__':
main()
|
<commit_before>from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items())[0:15]:
print(entry)
if __name__ == '__main__':
main()
<commit_msg>Test now prints out all kanji<commit_after>from readKanjiFile import *
def main():
testDict = readKanjiFile('kanjiTestFile.txt')
print('There are {} entries in this dictionary.'.format(len(testDict)))
for entry in list(testDict.items()):
print(entry)
if __name__ == '__main__':
main()
|
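The whole change in the record above is dropping a [0:15] slice. When only a preview is wanted, itertools.islice does the same without materialising the full list first; the dictionary below is a stand-in for the parsed kanji file.
from itertools import islice
test_dict = {'kanji-{}'.format(i): i for i in range(20)}
preview = list(islice(test_dict.items(), 15))  # equivalent to the old sliced loop
everything = list(test_dict.items())           # the new loop visits every entry
print('%d of %d entries in the preview' % (len(preview), len(everything)))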
d1d7684edb6d687206deea75d2ba13194046e376
|
sixquiprend/models/chosen_card.py
|
sixquiprend/models/chosen_card.py
|
from sixquiprend.sixquiprend import app, db
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
from sixquiprend.models.card import Card
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
from sixquiprend.sixquiprend import app, db
from sixquiprend.models.card import Card
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
Move an import to top
|
Move an import to top
|
Python
|
mit
|
nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend,nyddogghr/SixQuiPrend
|
from sixquiprend.sixquiprend import app, db
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
from sixquiprend.models.card import Card
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
Move an import to top
|
from sixquiprend.sixquiprend import app, db
from sixquiprend.models.card import Card
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
<commit_before>from sixquiprend.sixquiprend import app, db
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
from sixquiprend.models.card import Card
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
<commit_msg>Move an import to top<commit_after>
|
from sixquiprend.sixquiprend import app, db
from sixquiprend.models.card import Card
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
from sixquiprend.sixquiprend import app, db
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
from sixquiprend.models.card import Card
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
Move an import to topfrom sixquiprend.sixquiprend import app, db
from sixquiprend.models.card import Card
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
<commit_before>from sixquiprend.sixquiprend import app, db
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
from sixquiprend.models.card import Card
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
<commit_msg>Move an import to top<commit_after>from sixquiprend.sixquiprend import app, db
from sixquiprend.models.card import Card
class ChosenCard(db.Model):
id = db.Column(db.Integer, primary_key=True)
user_id = db.Column(db.Integer, db.ForeignKey('user.id', ondelete="CASCADE"))
game_id = db.Column(db.Integer, db.ForeignKey('game.id', ondelete="CASCADE"))
card_id = db.Column(db.Integer, db.ForeignKey('card.id'))
################################################################################
## Serializer
################################################################################
def serialize(self):
return {
'id': self.id,
'user_id': self.user_id,
'game_id': self.game_id,
'card': Card.find(self.card_id)
}
|
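An import deferred into a method, as in the old serialize() above, is usually a circular-import workaround, so hoisting it to the top is only safe once no cycle remains. A runnable reduction of the serializer with plain stubs (Card and ChosenCard here are not the real SQLAlchemy models):
class Card(object):
    @staticmethod
    def find(card_id):
        return {'id': card_id}
class ChosenCard(object):
    def __init__(self, id, user_id, game_id, card_id):
        self.id, self.user_id = id, user_id
        self.game_id, self.card_id = game_id, card_id
    def serialize(self):
        # Card is resolved at module level, mirroring the moved import.
        return {'id': self.id, 'user_id': self.user_id,
                'game_id': self.game_id, 'card': Card.find(self.card_id)}
print(ChosenCard(1, 2, 3, 104).serialize())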
778f5b8f905195a54a09f1e9cc8768e5cabc4dcd
|
navigator/settings/prod.py
|
navigator/settings/prod.py
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
# XXX: This needs to be made longer once it is confirmed it works as desired
SECURE_HSTS_SECONDS = 31536000
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
|
Set the secure flag for both our cookies
|
Set the secure flag for both our cookies
|
Python
|
mit
|
uktrade/navigator,dahfool/navigator,dahfool/navigator,uktrade/navigator,uktrade/navigator,dahfool/navigator,dahfool/navigator,uktrade/navigator
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
# XXX: This needs to be made longer once it is confirmed it works as desired
SECURE_HSTS_SECONDS = 31536000
Set the secure flag for both our cookies
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
|
<commit_before>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
# XXX: This needs to be made longer once it is confirmed it works as desired
SECURE_HSTS_SECONDS = 31536000
<commit_msg>Set the secure flag for both our cookies<commit_after>
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
# XXX: This needs to be made longer once it is confirmed it works as desired
SECURE_HSTS_SECONDS = 31536000
Set the secure flag for both our cookiesfrom .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
|
<commit_before>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
# XXX: This needs to be made longer once it is confirmed it works as desired
SECURE_HSTS_SECONDS = 31536000
<commit_msg>Set the secure flag for both our cookies<commit_after>from .base import *
DEBUG = False
ALLOWED_HOSTS = ['selling-online-overseas.export.great.gov.uk']
ADMINS = (('David Downes', 'david@downes.co.uk'),)
MIDDLEWARE_CLASSES += [
'core.middleware.IpRestrictionMiddleware',
]
INSTALLED_APPS += [
'raven.contrib.django.raven_compat'
]
RAVEN_CONFIG = {
'dsn': os.environ.get('SENTRY_DSN'),
}
ip_check = os.environ.get('RESTRICT_IPS', False)
RESTRICT_IPS = ip_check == 'True' or ip_check == '1'
ALLOWED_IPS = []
ALLOWED_IP_RANGES = ['165.225.80.0/22', '193.240.203.32/29']
SECURE_SSL_REDIRECT = True
SECURE_HSTS_SECONDS = 31536000
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
|
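The RESTRICT_IPS parsing in the record above is a common environment-flag idiom. A tiny sketch; env_bool() is my own name for it, not part of the settings module:
import os
def env_bool(name):
    # Only the strings 'True' and '1' count as truthy.
    value = os.environ.get(name, 'False')
    return value == 'True' or value == '1'
os.environ['RESTRICT_IPS'] = '1'
print(env_bool('RESTRICT_IPS'))  # True
print(env_bool('UNSET_FLAG'))    # False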
166c1a4dde981d5bd7d20a00c8329d7bbb4a3c00
|
nipype/interfaces/setup.py
|
nipype/interfaces/setup.py
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
Remove reference to non-existing data directory.
|
Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
Python
|
bsd-3-clause
|
arokem/nipype,gerddie/nipype,iglpdc/nipype,pearsonlab/nipype,carlohamalainen/nipype,Leoniela/nipype,glatard/nipype,dgellis90/nipype,satra/NiPypeold,glatard/nipype,mick-d/nipype,carolFrohlich/nipype,blakedewey/nipype,dgellis90/nipype,pearsonlab/nipype,mick-d/nipype_source,FCP-INDI/nipype,dgellis90/nipype,carolFrohlich/nipype,FredLoney/nipype,arokem/nipype,rameshvs/nipype,arokem/nipype,gerddie/nipype,carolFrohlich/nipype,gerddie/nipype,rameshvs/nipype,mick-d/nipype,sgiavasis/nipype,carlohamalainen/nipype,dmordom/nipype,wanderine/nipype,fprados/nipype,grlee77/nipype,carlohamalainen/nipype,blakedewey/nipype,mick-d/nipype_source,Leoniela/nipype,FredLoney/nipype,dgellis90/nipype,sgiavasis/nipype,rameshvs/nipype,blakedewey/nipype,dmordom/nipype,satra/NiPypeold,dmordom/nipype,sgiavasis/nipype,christianbrodbeck/nipype,FredLoney/nipype,grlee77/nipype,fprados/nipype,iglpdc/nipype,iglpdc/nipype,JohnGriffiths/nipype,sgiavasis/nipype,gerddie/nipype,grlee77/nipype,christianbrodbeck/nipype,wanderine/nipype,fprados/nipype,FCP-INDI/nipype,arokem/nipype,JohnGriffiths/nipype,wanderine/nipype,carolFrohlich/nipype,FCP-INDI/nipype,pearsonlab/nipype,glatard/nipype,JohnGriffiths/nipype,blakedewey/nipype,grlee77/nipype,rameshvs/nipype,glatard/nipype,Leoniela/nipype,pearsonlab/nipype,mick-d/nipype_source,FCP-INDI/nipype,mick-d/nipype,iglpdc/nipype,wanderine/nipype,JohnGriffiths/nipype,mick-d/nipype
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
<commit_before>def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
<commit_msg>Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
<commit_before>def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('data')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
<commit_msg>Remove reference to non-existing data directory.
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@513 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('interfaces', parent_package, top_path)
config.add_data_dir('tests')
config.add_data_dir('script_templates')
return config
if __name__ == '__main__':
from numpy.distutils.core import setup
setup(**configuration(top_path='').todict())
|
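numpy.distutils, used in the record above, has been deprecated and removed in recent NumPy releases, so the pattern is mocked below purely to show what the configuration accumulates; this Configuration is a stub, not the real class.
class Configuration(object):
    def __init__(self, name):
        self.name = name
        self.data_dirs = []
    def add_data_dir(self, path):
        # The real class bundles the directory's files into the built package.
        self.data_dirs.append(path)
config = Configuration('interfaces')
config.add_data_dir('tests')
config.add_data_dir('script_templates')  # 'data' dropped: the directory no longer exists
print(config.data_dirs)                  # ['tests', 'script_templates']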
56ac100c8ca357a5600db7a16859cca1483ccb13
|
blueprints/multi_node_kubernetes_cluster/teardown_kubernetes_cluster/teardown_kubernetes_cluster.py
|
blueprints/multi_node_kubernetes_cluster/teardown_kubernetes_cluster/teardown_kubernetes_cluster.py
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
from utilities.run_command import execute_command
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
execute_command('rm -rf {}'.format(resource_dir))
|
Remove config files from filesystem on teardown
|
Remove config files from filesystem on teardown
[DEV-13843]
|
Python
|
apache-2.0
|
CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge,CloudBoltSoftware/cloudbolt-forge
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
Remove config files from filesystem on teardown
[DEV-13843]
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
from utilities.run_command import execute_command
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
execute_command('rm -rf {}'.format(resource_dir))
|
<commit_before>"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()<commit_msg>Remove config files from filesystem on teardown
[DEV-13843]<commit_after>
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
from utilities.run_command import execute_command
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
execute_command('rm -rf {}'.format(resource_dir))
|
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
Remove config files from filesystem on teardown
[DEV-13843]
"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
from utilities.run_command import execute_command
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
execute_command('rm -rf {}'.format(resource_dir))
|
<commit_before>"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()<commit_msg>Remove config files from filesystem on teardown
[DEV-13843]<commit_after>"""
Teardown the CloudBolt resources (container_orchestrator, environment)
associated with this Kubernetes cluster.
"""
from common.methods import set_progress
from containerorchestrators.kuberneteshandler.models import Kubernetes
from utilities.run_command import execute_command
def run(job, *args, **kwargs):
resource = job.resource_set.first()
container_orchestrator = Kubernetes.objects.get(id=resource.container_orchestrator_id)
environment = container_orchestrator.environment_set.first()
container_orchestrator.delete()
environment.delete()
resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
execute_command('rm -rf {}'.format(resource_dir))
|
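For comparison with the `rm -rf` shell-out in the teardown above, a minimal sketch of the same cleanup step using only the standard library; the path layout is taken from the record, and the resource.id attribute is an assumption:

import shutil

def remove_resource_dir(resource):
    # Path layout taken from the record; resource.id is an assumed attribute.
    resource_dir = '/var/opt/cloudbolt/kubernetes/resource-{}'.format(resource.id)
    shutil.rmtree(resource_dir, ignore_errors=True)  # no-op if the path is already gone

shutil.rmtree avoids the quoting pitfalls that come with building shell commands from formatted paths.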
85405d242b41b9dd721131c6e3046c47fe86847d
|
pic2map/cli.py
|
pic2map/cli.py
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
args = parser.parse_args(argv)
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import logging
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def configure_logging(log_level):
"""Configure logging based on command line argument.
:param log_level: Log level passed from the command line
:type log_level: int
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Log to sys.stderr using log level
# passed through command line
log_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
log_handler.setFormatter(formatter)
log_handler.setLevel(log_level)
root_logger.addHandler(log_handler)
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
log_levels = ['debug', 'info', 'warning', 'error', 'critical']
parser.add_argument(
'-l', '--log-level',
dest='log_level',
choices=log_levels,
default='warning',
help=('Log level. One of {0} or {1} '
'(%(default)s by default)'
.format(', '.join(log_levels[:-1]), log_levels[-1])))
args = parser.parse_args(argv)
args.log_level = getattr(logging, args.log_level.upper())
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
Add command line option to set log level
|
Add command line option to set log level
|
Python
|
mit
|
jcollado/pic2map,jcollado/pic2map,jcollado/pic2map
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
args = parser.parse_args(argv)
return args
if __name__ == '__main__':
main(sys.argv[1:])
Add command line option to set log level
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import logging
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def configure_logging(log_level):
"""Configure logging based on command line argument.
:param log_level: Log level passed from the command line
:type log_level: int
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Log to sys.stderr using log level
# passed through command line
log_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
log_handler.setFormatter(formatter)
log_handler.setLevel(log_level)
root_logger.addHandler(log_handler)
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
log_levels = ['debug', 'info', 'warning', 'error', 'critical']
parser.add_argument(
'-l', '--log-level',
dest='log_level',
choices=log_levels,
default='warning',
help=('Log level. One of {0} or {1} '
'(%(default)s by default)'
.format(', '.join(log_levels[:-1]), log_levels[-1])))
args = parser.parse_args(argv)
args.log_level = getattr(logging, args.log_level.upper())
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before># -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
args = parser.parse_args(argv)
return args
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Add command line option to set log level<commit_after>
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import logging
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def configure_logging(log_level):
"""Configure logging based on command line argument.
:param log_level: Log level passed from the command line
:type log_level: int
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Log to sys.stderr using log level
# passed through command line
log_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
log_handler.setFormatter(formatter)
log_handler.setLevel(log_level)
root_logger.addHandler(log_handler)
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
log_levels = ['debug', 'info', 'warning', 'error', 'critical']
parser.add_argument(
'-l', '--log-level',
dest='log_level',
choices=log_levels,
default='warning',
help=('Log level. One of {0} or {1} '
'(%(default)s by default)'
.format(', '.join(log_levels[:-1]), log_levels[-1])))
args = parser.parse_args(argv)
args.log_level = getattr(logging, args.log_level.upper())
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
args = parser.parse_args(argv)
return args
if __name__ == '__main__':
main(sys.argv[1:])
Add command line option to set log level
# -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import logging
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def configure_logging(log_level):
"""Configure logging based on command line argument.
:param log_level: Log level passed from the command line
:type log_level: int
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Log to sys.stderr using log level
# passed through command line
log_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
log_handler.setFormatter(formatter)
log_handler.setLevel(log_level)
root_logger.addHandler(log_handler)
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
log_levels = ['debug', 'info', 'warning', 'error', 'critical']
parser.add_argument(
'-l', '--log-level',
dest='log_level',
choices=log_levels,
default='warning',
help=('Log level. One of {0} or {1} '
'(%(default)s by default)'
.format(', '.join(log_levels[:-1]), log_levels[-1])))
args = parser.parse_args(argv)
args.log_level = getattr(logging, args.log_level.upper())
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before># -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
args = parser.parse_args(argv)
return args
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Add command line option to set log level<commit_after># -*- coding: utf-8 -*-
"""Command Line Interface."""
import argparse
import logging
import sys
def main(argv):
"""Entry point for the pic2map.py script."""
args = parse_arguments(argv)
print args
def configure_logging(log_level):
"""Configure logging based on command line argument.
:param log_level: Log level passed from the command line
:type log_level: int
"""
root_logger = logging.getLogger()
root_logger.setLevel(logging.DEBUG)
# Log to sys.stderr using log level
# passed through command line
log_handler = logging.StreamHandler()
formatter = logging.Formatter('%(levelname)s: %(message)s')
log_handler.setFormatter(formatter)
log_handler.setLevel(log_level)
root_logger.addHandler(log_handler)
def parse_arguments(argv):
"""Parse command line arguments.
:returns: Parsed arguments
:rtype: argparse.Namespace
"""
parser = argparse.ArgumentParser(
description='Display pictures location in a map')
log_levels = ['debug', 'info', 'warning', 'error', 'critical']
parser.add_argument(
'-l', '--log-level',
dest='log_level',
choices=log_levels,
default='warning',
help=('Log level. One of {0} or {1} '
'(%(default)s by default)'
.format(', '.join(log_levels[:-1]), log_levels[-1])))
args = parser.parse_args(argv)
args.log_level = getattr(logging, args.log_level.upper())
return args
if __name__ == '__main__':
main(sys.argv[1:])
|
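The record defines configure_logging() but its main() never calls it; a minimal sketch of the missing wiring, assuming the parse_arguments() and configure_logging() definitions shown above:

import logging

def main(argv):
    args = parse_arguments(argv)       # args.log_level is already a logging constant
    configure_logging(args.log_level)  # route -l/--log-level to the root logger
    logging.getLogger(__name__).info('pic2map started')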
f1d2bb08dfde9402b7fc858d57130b43e3f1cd4d
|
bootstrap/hooks.py
|
bootstrap/hooks.py
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = abspath(join(BOOTSTRAP, pardir))
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = join(BOOTSTRAP, pardir)
VIRTUALENV_BIN = join(VIRTUALENV, 'bin')
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV_BIN)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
Fix wrong destination for postactivate file.
|
Fix wrong destination for postactivate file.
|
Python
|
mit
|
henriquebastos/virtualenv-bootstrap,henriquebastos/virtualenv-bootstrap
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = abspath(join(BOOTSTRAP, pardir))
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
Fix wrong destination for postactivate file.
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = join(BOOTSTRAP, pardir)
VIRTUALENV_BIN = join(VIRTUALENV, 'bin')
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV_BIN)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
<commit_before># coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = abspath(join(BOOTSTRAP, pardir))
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
<commit_msg>Fix wrong destination for postactivate file.<commit_after>
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = join(BOOTSTRAP, pardir)
VIRTUALENV_BIN = join(VIRTUALENV, 'bin')
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV_BIN)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = abspath(join(BOOTSTRAP, pardir))
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
Fix wrong destination for postactivate file.
# coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = join(BOOTSTRAP, pardir)
VIRTUALENV_BIN = join(VIRTUALENV, 'bin')
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV_BIN)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
<commit_before># coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = abspath(join(BOOTSTRAP, pardir))
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
<commit_msg>Fix wrong destination for postactivate file.<commit_after># coding: utf-8
from os.path import join, dirname, pardir, abspath
from shutil import copy
import subprocess
BOOTSTRAP = abspath(dirname(__file__))
ROOT = abspath(join(BOOTSTRAP, pardir))
# Path where venv will be created. It's imported by bootstrapX.Y.py
VIRTUALENV = join(BOOTSTRAP, pardir)
VIRTUALENV_BIN = join(VIRTUALENV, 'bin')
ACTIVATE = join(VIRTUALENV, 'bin', 'activate_this.py')
WITH_VENV = join(BOOTSTRAP, 'with_venv.sh')
def with_venv(*args):
"""
Runs the given command inside virtualenv.
"""
cmd = list(args)
cmd.insert(0, WITH_VENV)
return subprocess.call(cmd)
def after_install(options, home_dir):
copy(join(BOOTSTRAP, 'postactivate'), VIRTUALENV_BIN)
with_venv('pip', 'install', '-r', join(ROOT, 'requirements.txt'))
print "Done! Activate your virtualenv: source bin/activate"
|
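The ACTIVATE constant above is defined but unused in the record; a sketch of the conventional virtualenv pattern it points at, in the same Python 2 style as the record (this is an assumption about intended use, not part of the commit):

# Make the current Python 2 interpreter use the virtualenv's packages
# (virtualenv's documented activate_this.py pattern).
execfile(ACTIVATE, dict(__file__=ACTIVATE))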
8e72ef3fa525c961786e9b60c039c847bc2c710f
|
caSandbox.py
|
caSandbox.py
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
i = 0
while True:
# Check for exit key
char = screen.getch()
if char == ord('q'):
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
while True:
# Check for exit key
char = screen.getch()
if char != -1:
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
Make program close on any keypress
|
Make program close on any keypress
|
Python
|
mit
|
cferwin/CA-Sandbox
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
i = 0
while True:
# Check for exit key
char = screen.getch()
if char == ord('q'):
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
Make program close on any keypress
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
while True:
# Check for exit key
char = screen.getch()
if char != -1:
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
<commit_before>import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
i = 0
while True:
# Check for exit key
char = screen.getch()
if char == ord('q'):
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
<commit_msg>Make program close on any keypress<commit_after>
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
while True:
# Check for exit key
char = screen.getch()
if char != -1:
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
i = 0
while True:
# Check for exit key
char = screen.getch()
if char == ord('q'):
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
Make program close on any keypress
import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
while True:
# Check for exit key
char = screen.getch()
if char != -1:
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
<commit_before>import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
i = 0
while True:
# Check for exit key
char = screen.getch()
if char == ord('q'):
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
<commit_msg>Make program close on any keypress<commit_after>import map
import curses
# Set up Curses screen
screen = curses.initscr()
curses.noecho()
screen.keypad(True)
curses.cbreak()
curses.halfdelay(5) # Wait for half a second for input before continuing
curses.start_color()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
# Initialize the map
m = map.Map(screen, "data/test_data.txt")
while True:
# Check for exit key
char = screen.getch()
if char != -1:
break
# Advance the simulation
m.print_cells(x=10, y=10)
m.update_cells()
# Clean up
curses.nocbreak()
screen.keypad(False)
curses.echo()
curses.endwin()
|
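A sketch of the same loop under curses.wrapper, which performs the noecho/cbreak/keypad setup and restores the terminal even if the loop raises, making the manual clean-up block unnecessary; map.Map, the color pairs, and the data path are taken from the record:

import curses
import map  # the record's map module

def run(screen):
    curses.halfdelay(5)  # wait half a second for input before continuing
    curses.init_pair(1, curses.COLOR_RED, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_WHITE, curses.COLOR_BLACK)
    m = map.Map(screen, "data/test_data.txt")
    while screen.getch() == -1:  # exit on any keypress
        m.print_cells(x=10, y=10)
        m.update_cells()

curses.wrapper(run)  # handles initscr/endwin and cleanup on exceptions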
bf38a26ea239ce70fd4fc3748912b243fb1f7d88
|
tools/perf/benchmarks/pica.py
|
tools/perf/benchmarks/pica.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
# Needed for native custom elements (document.register)
options.AppendExtraBrowserArg('--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
Enable native custom elements for Pica benchmark
|
Enable native custom elements for Pica benchmark
R=tonyg@chromium.org
BUG=245358
Review URL: https://codereview.chromium.org/22884003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217042 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Chilledheart/chromium,mogoweb/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,hgl888/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,ltilve/chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,jaruba/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,ltilve/chromium,markYoungH/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,anirudhSK/chromium,Jonekee/chromium.src,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,chuan9/chromium-crosswalk,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,axinging/chromium-crosswalk,ChromiumWebApps/chromium,Chilledheart/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,chuan9/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,P
eterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,ondra-novak/chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,ondra-novak/chromium.src,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,patrickm/chromium.src,Jonekee/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,patrickm/chromium.src,dednal/chromium.src,M4sse/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,littlstar/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,dednal/chromium.src,littlstar/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,dushu1203/chromium.src,ChromiumWebApps/chromium,anirudhSK/chromium,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,ltilve/chromium,M4sse/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,M4sse/chromium.src,dednal/chromium.src,anirudhSK/chromium,jaruba/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,anirudhSK/chromium,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,mogowe
b/chromium-crosswalk
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
Enable native custom elements for Pica benchmark
R=tonyg@chromium.org
BUG=245358
Review URL: https://codereview.chromium.org/22884003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217042 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
# Needed for native custom elements (document.register)
options.AppendExtraBrowserArg('--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
<commit_msg>Enable native custom elements for Pica benchmark
R=tonyg@chromium.org
BUG=245358
Review URL: https://codereview.chromium.org/22884003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217042 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
# Needed for native custom elements (document.register)
options.AppendExtraBrowserArg('--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
Enable native custom elements for Pica benchmark
R=tonyg@chromium.org
BUG=245358
Review URL: https://codereview.chromium.org/22884003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217042 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
# Needed for native custom elements (document.register)
options.AppendExtraBrowserArg('--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
<commit_msg>Enable native custom elements for Pica benchmark
R=tonyg@chromium.org
BUG=245358
Review URL: https://codereview.chromium.org/22884003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@217042 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import test
from telemetry.page import page_measurement
class PicaMeasurement(page_measurement.PageMeasurement):
def CustomizeBrowserOptions(self, options):
# Needed for native custom elements (document.register)
options.AppendExtraBrowserArg('--enable-experimental-web-platform-features')
def MeasurePage(self, _, tab, results):
result = int(tab.EvaluateJavaScript('__pica_load_time'))
results.Add('Total', 'ms', result)
class Pica(test.Test):
test = PicaMeasurement
page_set = 'page_sets/pica.json'
|
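The CustomizeBrowserOptions hook shown above extends naturally to several flags; a sketch in which AppendExtraBrowserArg and the first flag come from the record, while the second flag is purely illustrative:

class PicaMeasurement(page_measurement.PageMeasurement):
    def CustomizeBrowserOptions(self, options):
        # First flag from the record; the second is illustrative only.
        for flag in ('--enable-experimental-web-platform-features',
                     '--no-first-run'):
            options.AppendExtraBrowserArg(flag)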
2f72f75da7ba03e331927c5ab0a5702c150b2f9a
|
perfrunner/celeryremote.py
|
perfrunner/celeryremote.py
|
BROKER_URL = 'amqp://couchbase:couchbase@ci.sc.couchbase.com:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
Use broker IP instead of domain name
|
Use broker IP instead of domain name
Change-Id: Ide27c97a00c18ac62c1a92e2ec51c74c5af4cf30
Reviewed-on: http://review.couchbase.org/81029
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
Python
|
apache-2.0
|
couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner
|
BROKER_URL = 'amqp://couchbase:couchbase@ci.sc.couchbase.com:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
Use broker IP instead of domain name
Change-Id: Ide27c97a00c18ac62c1a92e2ec51c74c5af4cf30
Reviewed-on: http://review.couchbase.org/81029
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
|
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
<commit_before>BROKER_URL = 'amqp://couchbase:couchbase@ci.sc.couchbase.com:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
<commit_msg>Use broker IP instead of domain name
Change-Id: Ide27c97a00c18ac62c1a92e2ec51c74c5af4cf30
Reviewed-on: http://review.couchbase.org/81029
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com><commit_after>
|
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
BROKER_URL = 'amqp://couchbase:couchbase@ci.sc.couchbase.com:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
Use broker IP instead of domain name
Change-Id: Ide27c97a00c18ac62c1a92e2ec51c74c5af4cf30
Reviewed-on: http://review.couchbase.org/81029
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com>
BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
<commit_before>BROKER_URL = 'amqp://couchbase:couchbase@ci.sc.couchbase.com:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
<commit_msg>Use broker IP instead of domain name
Change-Id: Ide27c97a00c18ac62c1a92e2ec51c74c5af4cf30
Reviewed-on: http://review.couchbase.org/81029
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Pavel Paulau <dd88eded64e90046a680e3a6c0828ceb8fe8a0e7@gmail.com><commit_after>BROKER_URL = 'amqp://couchbase:couchbase@172.23.97.73:5672/broker'
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_EXCHANGE = 'perf_results'
CELERY_RESULT_PERSISTENT = False
|
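A module of upper-case settings like this one is typically handed to Celery by dotted path; a minimal sketch using Celery's documented config_from_object, where the app name is an assumption and the module path follows the record's perfrunner/celeryremote.py:

from celery import Celery

app = Celery('perfrunner')  # app name is an assumption
app.config_from_object('perfrunner.celeryremote')  # loads BROKER_URL and CELERY_* settings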
a5bc36df3435258fad9700c150985998e9663ff9
|
haas/tests/test_coverage.py
|
haas/tests/test_coverage.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
except ImportError:
coverage = None
from mock import Mock, patch
from ..coverage import Coverage
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
from ..coverage import Coverage
except ImportError:
coverage = None
Coverage = None
from mock import Mock, patch
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
Fix test error when coverage is not installed
|
Fix test error when coverage is not installed
|
Python
|
bsd-3-clause
|
itziakos/haas,scalative/haas,sjagoe/haas,sjagoe/haas,scalative/haas,itziakos/haas
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
except ImportError:
coverage = None
from mock import Mock, patch
from ..coverage import Coverage
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
Fix test error when coverage is not installed
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
from ..coverage import Coverage
except ImportError:
coverage = None
Coverage = None
from mock import Mock, patch
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
except ImportError:
coverage = None
from mock import Mock, patch
from ..coverage import Coverage
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
<commit_msg>Fix test error when coverage is not installed<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
from ..coverage import Coverage
except ImportError:
coverage = None
Coverage = None
from mock import Mock, patch
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
except ImportError:
coverage = None
from mock import Mock, patch
from ..coverage import Coverage
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
Fix test error when coverage is not installed
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
from ..coverage import Coverage
except ImportError:
coverage = None
Coverage = None
from mock import Mock, patch
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
except ImportError:
coverage = None
from mock import Mock, patch
from ..coverage import Coverage
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
<commit_msg>Fix test error when coverage is not installed<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
try:
import coverage
from ..coverage import Coverage
except ImportError:
coverage = None
Coverage = None
from mock import Mock, patch
from ..testing import unittest
@unittest.skipIf(coverage is None, 'Coverage is not installed')
class TestCoverage(unittest.TestCase):
@patch('coverage.coverage')
def test_coverage(self, coverage_func):
coverage_object = Mock()
coverage_func.return_value = coverage_object
coverage_object.start = Mock()
coverage_object.stop = Mock()
coverage_object.save = Mock()
cov = Coverage()
coverage_func.assert_called_once_with()
cov.setup()
coverage_object.start.assert_called_once_with()
self.assertFalse(coverage_object.stop.called)
self.assertFalse(coverage_object.save.called)
cov.teardown()
coverage_object.stop.assert_called_once_with()
coverage_object.save.assert_called_once_with()
|
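A side note on the guard pattern in the record above: wrapping both the optional dependency and the module that needs it in one try/except, then skipping the whole TestCase, generalizes to any optional test dependency. A minimal sketch of the same idea (lxml and the mypkg module are hypothetical stand-ins):

import unittest
try:
    import lxml  # optional dependency
    from mypkg.xml_support import XmlBackend  # hypothetical module that requires lxml
except ImportError:
    lxml = None
    XmlBackend = None

@unittest.skipIf(lxml is None, 'lxml is not installed')
class TestXmlBackend(unittest.TestCase):
    def test_backend_importable(self):
        # Runs only when both imports above succeeded.
        self.assertIsNotNone(XmlBackend)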
5b155b6e6b09874d7783135528d39ab7bbc61fdb
|
config/__init__.py
|
config/__init__.py
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, Config, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in keys {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return Config() # return empty 'dict' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, self.__class__, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return self.__class__() # return empty 'Config' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
Use dynamic self.__class__ and not name directly
|
Use dynamic self.__class__ and not name directly
|
Python
|
mit
|
FichteFoll/TelegramIRCImageProxy,codetalkio/TelegramIRCImageProxy,FichteFoll/CodetalkIRCBot
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, Config, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in keys {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return Config() # return empty 'dict' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
Use dynamic self.__class__ and not name directly
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, self.__class__, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return self.__class__() # return empty 'Config' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
<commit_before>import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, Config, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in keys {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return Config() # return empty 'dict' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
<commit_msg>Use dynamic self.__class__ and not name directly<commit_after>
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, self.__class__, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return self.__class__() # return empty 'Config' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, Config, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in keys {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return Config() # return empty 'dict' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
Use dynamic self.__class__ and not name directly
import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, self.__class__, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return self.__class__() # return empty 'Config' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
<commit_before>import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, Config, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in keys {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return Config() # return empty 'dict' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
<commit_msg>Use dynamic self.__class__ and not name directly<commit_after>import logging
import yaml
l = logging.getLogger(__name__)
def _replace_with_type(type_, replace_type, data):
if isinstance(data, type_):
return replace_type(data)
return data
class Config(dict):
def __init__(self, items=None):
if items is not None:
if hasattr(items, 'items'):
items = list(items.items())
for i, (k, v) in enumerate(items):
items[i] = (k, _replace_with_type(dict, self.__class__, v))
super().__init__(items)
else:
super().__init__()
def __getattr__(self, key):
if key in self:
return self[key]
else:
l.warn("AttrDict: did not find key '{}' in {}", key, self.keys())
if l.getEffectiveLevel() <= logging.INFO:
import inspect
stack = inspect.stack(1)[1:]
l.info("-- AttrDict stack --")
for info in reversed(stack):
l.info(' File "{0[1]}", line {0[2]}, in {0[3]} -- {1}',
info, info[4][-1].strip())
l.info("-- AttrDict stack -- end")
return self.__class__() # return empty 'Config' as default
def read_file(filename):
l.debug("reading config file: '{}'", filename)
with open(filename) as f:
config = Config(yaml.safe_load(f))
l.debug("config: {!s}", config)
return config
|
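Why the commit above matters: with Config hard-coded, any subclass would still wrap nested dicts (and return missing-key defaults) as plain Config objects, silently dropping subclass behaviour; self.__class__ preserves the dynamic type. A tiny illustration (StrictConfig is a hypothetical subclass):

class Base(dict):
    def child(self):
        # Hard-coding Base here would always return Base;
        # self.__class__ keeps the caller's subclass.
        return self.__class__()

class StrictConfig(Base):
    pass

print(type(StrictConfig().child()).__name__)  # -> StrictConfig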
49bf8bd8137928a1dc5165f38f8abfe423f5e7f0
|
pi_director/controllers/user_controls.py
|
pi_director/controllers/user_controls.py
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
Create the user if it isn't already in the database first, then make it an admin.
|
Create the user if it isn't already in the database first, then make it an admin.
|
Python
|
mit
|
selfcommit/pi_director,PeterGrace/pi_director,selfcommit/pi_director,PeterGrace/pi_director,PeterGrace/pi_director,selfcommit/pi_director
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
Create the user if it isn't already in the database first, then make it an admin.
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
<commit_before>from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
<commit_msg>Create the user if it isn't already in the database first, then make it an admin.<commit_after>
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
Create the user if it isn't already in the database first, then make it an admin.
from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
<commit_before>from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
user.AccessLevel=2
DBSession.flush()
return True
<commit_msg>Create the user if it isn't already in the database first, then make it an admin.<commit_after>from pyramid.response import Response
from pi_director.models.models import (
DBSession,
MyModel,
)
from pi_director.models.UserModel import UserModel
def authorize_user(email):
user=DBSession.query(UserModel).filter(UserModel.email==email).one()
user.AccessLevel=2
DBSession.flush()
def delete_user(email):
DBSession.query(UserModel).filter(UserModel.email==email).delete()
def get_users():
UserList=DBSession.query(UserModel).all()
return UserList
def make_an_admin(request):
email=request.matchdict['email']
'''First, make sure there aren't already admins in the system'''
res=DBSession.query(UserModel).filter(UserModel.AccessLevel==2).first()
if res != None:
msg="User already an admin: {user}".format(user=res.email)
return False
user=DBSession.query(UserModel).filter(UserModel.email==email).first()
if user == None:
user=UserModel()
user.email=email
DBSession.add(user)
user.AccessLevel=2
DBSession.flush()
return True
|
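The fix in the record above is the classic query-then-create ("get or create") pattern. A generic helper along the same lines (a sketch only; under concurrent writers you would still want a unique index plus IntegrityError handling):

def get_or_create(session, model, **lookup):
    """Return the row matching `lookup`, creating it if absent."""
    instance = session.query(model).filter_by(**lookup).first()
    if instance is None:
        instance = model(**lookup)
        session.add(instance)
        session.flush()  # assigns the primary key without committing
    return instance

# Usage mirroring the commit (names from the record):
#   user = get_or_create(DBSession, UserModel, email=email)
#   user.AccessLevel = 2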
6d450dccc7e89e4e90fd1f0f27cdf2aa67166859
|
conanfile.py
|
conanfile.py
|
from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
|
from conans import ConanFile, CMake, tools
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
|
Use collect_libs for finding libs
|
Use collect_libs for finding libs
|
Python
|
lgpl-2.1
|
Hiradur/mysocketw,Hiradur/mysocketw
|
from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
Use collect_libs for finding libs
|
from conans import ConanFile, CMake, tools
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
|
<commit_before>from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
<commit_msg>Use collect_libs for finding libs<commit_after>
|
from conans import ConanFile, CMake, tools
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
|
from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
Use collect_libs for finding libs
from conans import ConanFile, CMake, tools
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
|
<commit_before>from conans import ConanFile, CMake
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = ["SocketW"]
<commit_msg>Use collect_libs for finding libs<commit_after>from conans import ConanFile, CMake, tools
class SocketwConan(ConanFile):
name = "SocketW"
version = "3.10.36"
license = "GNU Lesser General Public License v2.1"
url = "https://github.com/RigsOfRods/socketw/issues"
description = "SocketW is a library which provides cross-platform socket abstraction"
settings = "os", "compiler", "build_type", "arch"
#options = {"shared": [True, False]}
#default_options = "shared=False"
generators = "cmake"
exports_sources = "src/*", "CMakeLists.txt", "LICENSE", "README"
def requirements(self):
self.requires.add('OpenSSL/1.0.2@conan/stable')
def build(self):
cmake = CMake(self)
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake.install()
def package_info(self):
self.cpp_info.libs = tools.collect_libs(self)
|
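Note that collect_libs lives in conans.tools (hence the extra import in the new recipe): it globs the package's lib folders and returns whatever libraries were actually built, which keeps package_info correct if names or suffixes change. A minimal sketch of the pattern for Conan 1.x:

from conans import ConanFile, CMake, tools

class ExampleConan(ConanFile):
    name = "example"
    version = "0.1"
    settings = "os", "compiler", "build_type", "arch"

    def package_info(self):
        # Scans self.package_folder for built libraries instead of
        # hard-coding their names.
        self.cpp_info.libs = tools.collect_libs(self)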
445db7dedb7c60edc03caa1969dca4253e2b9048
|
example_project/example_project/web/templatetags/testags.py
|
example_project/example_project/web/templatetags/testags.py
|
# -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
# -*- coding: utf-8 -*-
from django_jinja.library import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
Adjust the example_projects to work
|
Adjust the example_projects to work
|
Python
|
bsd-3-clause
|
glogiotatidis/django-jinja,akx/django-jinja,akx/django-jinja,niwinz/django-jinja,niwinz/django-jinja,akx/django-jinja,niwinz/django-jinja,akx/django-jinja,glogiotatidis/django-jinja,glogiotatidis/django-jinja,glogiotatidis/django-jinja
|
# -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
Adjust the example_projects to work
|
# -*- coding: utf-8 -*-
from django_jinja.library import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
<commit_before># -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
<commit_msg>Adjust the example_projects to work<commit_after>
|
# -*- coding: utf-8 -*-
from django_jinja.library import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
# -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
Adjust the example_projects to work
# -*- coding: utf-8 -*-
from django_jinja.library import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
<commit_before># -*- coding: utf-8 -*-
from django_jinja.base import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
<commit_msg>Adjust the example_projects to work<commit_after># -*- coding: utf-8 -*-
from django_jinja.library import Library
import jinja2
register = Library()
@register.filter
@jinja2.contextfilter
def datetimeformat(ctx, value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
@register.global_function
def hello(name):
return "Hello" + name
|
0a02b896c7f8499504a855652de22bab10824c69
|
database_setup.py
|
database_setup.py
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
Add description column to Restaurant
|
feat: Add description column to Restaurant
|
Python
|
mit
|
gsbullmer/restaurant-menu-directory,gsbullmer/restaurant-menu-directory
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
feat: Add description column to Restaurant
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
<commit_before>import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
<commit_msg>feat: Add description column to Restaurant<commit_after>
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
feat: Add description column to Restaurant
import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
<commit_before>import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
<commit_msg>feat: Add description column to Restaurant<commit_after>import sys
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Restaurant(Base):
__tablename__ = 'restaurant'
name = Column(String(80), nullable = False)
description = Column(String(250))
id = Column(Integer, primary_key = True)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
}
class MenuItem(Base):
__tablename__ = 'menu_item'
name = Column(String(80), nullable = False)
id = Column(Integer,primary_key = True)
course = Column(String(250))
description = Column(String(250))
price = Column(String(8))
restaurant_id = Column(Integer, ForeignKey('restaurant.id'))
restaurant = relationship(Restaurant)
@property
def serialize(self):
return {
'name': self.name,
'description': self.description,
'id': self.id,
'price': self.price,
'course': self.course,
}
engine = create_engine('sqlite:///restaurantmenu.db')
Base.metadata.create_all(engine)
|
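The serialize properties above exist so view code can hand query results straight to json.dumps or Flask's jsonify. A sketch of the intended use, assuming the module above is importable as database_setup:

import json
from sqlalchemy.orm import sessionmaker
from database_setup import Restaurant, engine

session = sessionmaker(bind=engine)()
print(json.dumps([r.serialize for r in session.query(Restaurant).all()], indent=2))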
3e02a38a9ae52603f620a7969ce532b61de531d7
|
libgreader/__init__.py
|
libgreader/__init__.py
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
try:
import requests
except ImportError:
# Will occur during setup.py install
pass
else:
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
Fix import error during setup.py install
|
Fix import error during setup.py install
|
Python
|
mit
|
smurfix/librssreader,askedrelic/libgreader
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
Fix import error during setup.py install
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
try:
import requests
except ImportError:
# Will occur during setup.py install
pass
else:
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
<commit_before># -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
<commit_msg>Fix import error during setup.py install<commit_after>
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
try:
import requests
except ImportError:
# Will occur during setup.py install
pass
else:
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
Fix import error during setup.py install
# -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
try:
import requests
except ImportError:
# Will occur during setup.py install
pass
else:
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
<commit_before># -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
<commit_msg>Fix import error during setup.py install<commit_after># -*- coding: utf-8 -*-
# libgreader
# Copyright (C) 2012 Matt Behrens <askedrelic@gmail.com>
# Python library for the Google Reader API
__author__ = "Matt Behrens <askedrelic@gmail.com>"
__version__ = "0.8.0"
__copyright__ = "Copyright (C) 2012 Matt Behrens"
try:
import requests
except ImportError:
# Will occur during setup.py install
pass
else:
from .googlereader import GoogleReader
from .auth import AuthenticationMethod, ClientAuthMethod, OAuthMethod, OAuth2Method
from .items import *
from .url import ReaderUrl
|
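The underlying cause the commit above works around: setup.py typically imports the package to read metadata such as __version__, and at install time the runtime dependency (requests) may not exist yet. An alternative that avoids importing the package at all is to parse the version textually (a common sketch, path assumed):

import re

def read_version(path="libgreader/__init__.py"):
    # Read __version__ without importing the package, so setup.py
    # works before `requests` has been installed.
    with open(path) as f:
        match = re.search(r"__version__\s*=\s*['\"]([^'\"]+)['\"]", f.read())
    if match is None:
        raise RuntimeError("version string not found")
    return match.group(1)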
3d8d82be3528cc0150dac0c8ade1f6c306b412e4
|
channels/apps.py
|
channels/apps.py
|
from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
Add early import to fix problems with other packages and Twisted.
|
Add early import to fix problems with other packages and Twisted.
|
Python
|
bsd-3-clause
|
andrewgodwin/channels,andrewgodwin/django-channels,django/channels
|
from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
Add early import to fix problems with other packages and Twisted.
|
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
<commit_before>from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
<commit_msg>Add early import to fix problems with other packages and Twisted.<commit_after>
|
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
Add early import to fix problems with other packages and Twisted.
from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
<commit_before>from django.apps import AppConfig
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
<commit_msg>Add early import to fix problems with other packages and Twisted.<commit_after>from django.apps import AppConfig
# We import this here to ensure the reactor is installed very early on
# in case other packages accidentally import twisted.internet.reactor
# (e.g. raven does this).
import daphne.server # noqa
#from .binding.base import BindingMetaclass
from .package_checks import check_all
class ChannelsConfig(AppConfig):
name = "channels"
verbose_name = "Channels"
def ready(self):
# Check versions
check_all()
# Do django monkeypatches
from .hacks import monkeypatch_django
monkeypatch_django()
# Instantiate bindings
#BindingMetaclass.register_all()
|
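Context for the early import above: Twisted allows exactly one reactor per process, installed by whichever import wins; a plain `from twisted.internet import reactor` (which raven performs indirectly) installs the default one, after which daphne can no longer install its own. A minimal reproduction of the ordering constraint:

from twisted.internet import reactor  # side effect: installs the default reactor
from twisted.internet import selectreactor
from twisted.internet.error import ReactorAlreadyInstalledError

try:
    selectreactor.install()  # too late, a reactor already exists
except ReactorAlreadyInstalledError:
    print("reactor already installed; hence the early daphne.server import")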
b94f849fe28918a343a142da57b6055064d5b194
|
tests/test_abort_generate_on_hook_error.py
|
tests/test_abort_generate_on_hook_error.py
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
Test that an error in post_gen_project aborts generation
|
Test that an error in post_gen_project aborts generation
|
Python
|
bsd-3-clause
|
dajose/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,dajose/cookiecutter,audreyr/cookiecutter,stevepiercy/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,pjbull/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,michaeljoseph/cookiecutter,stevepiercy/cookiecutter,Springerle/cookiecutter,luzfcb/cookiecutter
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
Test that an error in post_gen_project aborts generation
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
<commit_msg>Test that an error in post_gen_project aborts generation<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
Test that an error in post_gen_project aborts generation# -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
<commit_before># -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
<commit_msg>Test that an error in post_gen_project aborts generation<commit_after># -*- coding: utf-8 -*-
import pytest
from cookiecutter import generate
from cookiecutter import exceptions
@pytest.mark.usefixtures('clean_system')
def test_pre_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "yes",
"abort_post_gen": "no"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
@pytest.mark.usefixtures('clean_system')
def test_post_gen_hook(tmpdir):
context = {
'cookiecutter': {
"repo_dir": "foobar",
"abort_pre_gen": "no",
"abort_post_gen": "yes"
}
}
with pytest.raises(exceptions.FailedHookException):
generate.generate_files(
repo_dir='tests/hooks-abort-render',
context=context,
output_dir=str(tmpdir)
)
assert not tmpdir.join('foobar').isdir()
|
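The new test mirrors the existing one: pytest.raises as a context manager asserts the hook failure, and tmpdir confirms nothing was generated. A self-contained sketch of that pattern, assuming only pytest itself (run_hook is a hypothetical stand-in for a failing generation hook):
import pytest
def run_hook(abort):
    # Stand-in for a pre/post generation hook that can fail.
    if abort:
        raise RuntimeError('hook failed')
def test_hook_failure(tmpdir):
    # The with-block passes only if RuntimeError is actually raised.
    with pytest.raises(RuntimeError):
        run_hook(abort=True)
    # On failure, no output directory should exist.
    assert not tmpdir.join('output').isdir()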
1f66670b94d2eca70ecf8e26b21f8b28986154b9
|
test-mm.py
|
test-mm.py
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
path = "%s/%s/font.otf" % (baseDir, master)
font = autohint.openOpenTypeFile(path, "font.otf", None)
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in names:
glyph = font.convertToBez(name, False)
glyphs.append(glyph[0])
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
options = autohint.ACOptions()
options.quiet = True
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, "font.ufo", False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in glyphList:
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
glyphs.append(glyph)
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
Use the UFOs not the OTFs
|
Use the UFOs not the OTFs
Oops, the OTFs are not interpolation compatible due to overlap removal; I
should have used the UFOs all along. Now the script passes without
errors; I still need to verify the output.
|
Python
|
apache-2.0
|
khaledhosny/psautohint,khaledhosny/psautohint
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
path = "%s/%s/font.otf" % (baseDir, master)
font = autohint.openOpenTypeFile(path, "font.otf", None)
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in names:
glyph = font.convertToBez(name, False)
glyphs.append(glyph[0])
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
Use the UFOs not the OTFs
Oops, the OTFs are not interpolation compatible due to overlap removal; I
should have used the UFOs all along. Now the script passes without
errors; I still need to verify the output.
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
options = autohint.ACOptions()
options.quiet = True
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, "font.ufo", False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in glyphList:
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
glyphs.append(glyph)
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
<commit_before>from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
path = "%s/%s/font.otf" % (baseDir, master)
font = autohint.openOpenTypeFile(path, "font.otf", None)
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in names:
glyph = font.convertToBez(name, False)
glyphs.append(glyph[0])
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
<commit_msg>Use the UFOs not the OTFs
Oops, the OTFs are not interpolation compatible due to overlap removal; I
should have used the UFOs all along. Now the script passes without
errors; I still need to verify the output.<commit_after>
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
options = autohint.ACOptions()
options.quiet = True
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, "font.ufo", False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in glyphList:
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
glyphs.append(glyph)
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
path = "%s/%s/font.otf" % (baseDir, master)
font = autohint.openOpenTypeFile(path, "font.otf", None)
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in names:
glyph = font.convertToBez(name, False)
glyphs.append(glyph[0])
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
Use the UFOs not the OTFs
Oops, the OTFs are not interpolation compatible due to overlap removal; I
should have used the UFOs all along. Now the script passes without
errors; I still need to verify the output.from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
options = autohint.ACOptions()
options.quiet = True
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, "font.ufo", False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in glyphList:
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
glyphs.append(glyph)
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
<commit_before>from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
path = "%s/%s/font.otf" % (baseDir, master)
font = autohint.openOpenTypeFile(path, "font.otf", None)
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in names:
glyph = font.convertToBez(name, False)
glyphs.append(glyph[0])
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
<commit_msg>Use the UFOs not the OTFs
Oops, the OTFs are not interpolation compatible due to overlap removal; I
should have used the UFOs all along. Now the script passes without
errors; I still need to verify the output.<commit_after>from psautohint import autohint
from psautohint import psautohint
baseDir = "tests/data/source-code-pro"
masters = ("Black", "Bold", "ExtraLight", "Light", "Medium", "Regular", "Semibold")
glyphList = None
fonts = []
for master in masters:
print("Hinting %s" % master)
options = autohint.ACOptions()
options.quiet = True
path = "%s/%s/font.ufo" % (baseDir, master)
font = autohint.openUFOFile(path, "font.ufo", False, options)
font.useProcessedLayer = False
names = font.getGlyphList()
info = font.getFontInfo(font.getPSName(), path, False, False, [], [])
info = info.getFontInfo()
if glyphList is None:
glyphList = names
else:
assert glyphList == names
glyphs = []
for name in glyphList:
glyph = font.convertToBez(name, False, True)[0]
if not glyph:
glyph = "%%%s\n" % name
glyphs.append(glyph)
fonts.append(psautohint.autohint(info, glyphs, False, False, False))
glyphs = []
for i in range(len(glyphList)):
glyphs.append([f[i] for f in fonts])
print("MM Hinting")
glyphs = psautohint.autohintmm(info, glyphs, masters, True)
|
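The regrouping loop in the script above — glyphs.append([f[i] for f in fonts]) — is a plain list transpose: per-master rows become per-glyph columns before the MM hinter sees them. A tiny sketch of the equivalent zip idiom, with made-up data:
# Each inner list holds one master's hinted glyphs, in glyph order.
fonts = [['a0', 'b0'], ['a1', 'b1'], ['a2', 'b2']]
# zip(*rows) transposes: one entry per glyph, across all masters.
per_glyph = [list(masters) for masters in zip(*fonts)]
assert per_glyph == [['a0', 'a1', 'a2'], ['b0', 'b1', 'b2']]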
7f6da4dee6464e48a0e6b491f3f740a750e86ed2
|
dataactcore/scripts/resetAlembicVersion.py
|
dataactcore/scripts/resetAlembicVersion.py
|
import argparse
from dataactcore.models.errorInterface import ErrorInterface
from dataactcore.models.jobTrackerInterface import JobTrackerInterface
from dataactcore.models.userInterface import UserInterface
from dataactcore.models.validationInterface import ValidationInterface
from sqlalchemy import MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
def reset_alembic(alembic_version):
engine_list = [
ErrorInterface().engine,
JobTrackerInterface().engine,
UserInterface().engine,
ValidationInterface().engine,
]
for e in engine_list:
Session = sessionmaker(bind=e)
session = Session()
metadata = MetaData(bind=e)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
session.execute(u)
session.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version tables across broker databases.")
parser.add_argument(
'version', help="Version to set the Alembic migration tables to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
import argparse
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import update
from dataactcore.interfaces.db import GlobalDB
from dataactvalidator.app import createApp
def reset_alembic(alembic_version):
with createApp().app_context():
db = GlobalDB.db()
engine = db.engine
sess = db.session
metadata = MetaData(bind=engine)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
sess.execute(u)
sess.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version table.")
parser.add_argument(
'version', help="Version to set the Alembic migration table to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
Remove db interfaces from the alembic version reset helper script.
|
Remove db interfaces from the alembic version reset helper script.
Arguably, this script is no longer especially useful now that we
only have a single database for the broker. That said, removed the
interfaces in case folks are still using it.
|
Python
|
cc0-1.0
|
fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend
|
import argparse
from dataactcore.models.errorInterface import ErrorInterface
from dataactcore.models.jobTrackerInterface import JobTrackerInterface
from dataactcore.models.userInterface import UserInterface
from dataactcore.models.validationInterface import ValidationInterface
from sqlalchemy import MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
def reset_alembic(alembic_version):
engine_list = [
ErrorInterface().engine,
JobTrackerInterface().engine,
UserInterface().engine,
ValidationInterface().engine,
]
for e in engine_list:
Session = sessionmaker(bind=e)
session = Session()
metadata = MetaData(bind=e)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
session.execute(u)
session.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version tables across broker databases.")
parser.add_argument(
'version', help="Version to set the Alembic migration tables to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
Remove db interfaces from the alembic version reset helper script.
Arguably, this script is no longer especially useful now that we
only have a single database for the broker. That said, removed the
interfaces in case folks are still using it.
|
import argparse
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import update
from dataactcore.interfaces.db import GlobalDB
from dataactvalidator.app import createApp
def reset_alembic(alembic_version):
with createApp().app_context():
db = GlobalDB.db()
engine = db.engine
sess = db.session
metadata = MetaData(bind=engine)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
sess.execute(u)
sess.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version table.")
parser.add_argument(
'version', help="Version to set the Alembic migration table to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
<commit_before>import argparse
from dataactcore.models.errorInterface import ErrorInterface
from dataactcore.models.jobTrackerInterface import JobTrackerInterface
from dataactcore.models.userInterface import UserInterface
from dataactcore.models.validationInterface import ValidationInterface
from sqlalchemy import MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
def reset_alembic(alembic_version):
engine_list = [
ErrorInterface().engine,
JobTrackerInterface().engine,
UserInterface().engine,
ValidationInterface().engine,
]
for e in engine_list:
Session = sessionmaker(bind=e)
session = Session()
metadata = MetaData(bind=e)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
session.execute(u)
session.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version tables across broker databases.")
parser.add_argument(
'version', help="Version to set the Alembic migration tables to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
<commit_msg>Remove db interfaces from the alembic version reset helper script.
Arguably, this script is no longer especially useful now that we
only have a single database for the broker. That said, removed the
interfaces in case folks are still using it.<commit_after>
|
import argparse
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import update
from dataactcore.interfaces.db import GlobalDB
from dataactvalidator.app import createApp
def reset_alembic(alembic_version):
with createApp().app_context():
db = GlobalDB.db()
engine = db.engine
sess = db.session
metadata = MetaData(bind=engine)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
sess.execute(u)
sess.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version table.")
parser.add_argument(
'version', help="Version to set the Alembic migration table to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
import argparse
from dataactcore.models.errorInterface import ErrorInterface
from dataactcore.models.jobTrackerInterface import JobTrackerInterface
from dataactcore.models.userInterface import UserInterface
from dataactcore.models.validationInterface import ValidationInterface
from sqlalchemy import MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
def reset_alembic(alembic_version):
engine_list = [
ErrorInterface().engine,
JobTrackerInterface().engine,
UserInterface().engine,
ValidationInterface().engine,
]
for e in engine_list:
Session = sessionmaker(bind=e)
session = Session()
metadata = MetaData(bind=e)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
session.execute(u)
session.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version tables across broker databases.")
parser.add_argument(
'version', help="Version to set the Alembic migration tables to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
Remove db interfaces from the alembic version reset helper script.
Arguably, this script is no longer especially useful now that we
only have a single database for the broker. That said, removed the
interfaces in case folks are still using it.import argparse
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import update
from dataactcore.interfaces.db import GlobalDB
from dataactvalidator.app import createApp
def reset_alembic(alembic_version):
with createApp().app_context():
db = GlobalDB.db()
engine = db.engine
sess = db.session
metadata = MetaData(bind=engine)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
sess.execute(u)
sess.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version table.")
parser.add_argument(
'version', help="Version to set the Alembic migration table to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
<commit_before>import argparse
from dataactcore.models.errorInterface import ErrorInterface
from dataactcore.models.jobTrackerInterface import JobTrackerInterface
from dataactcore.models.userInterface import UserInterface
from dataactcore.models.validationInterface import ValidationInterface
from sqlalchemy import MetaData, Table
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
def reset_alembic(alembic_version):
engine_list = [
ErrorInterface().engine,
JobTrackerInterface().engine,
UserInterface().engine,
ValidationInterface().engine,
]
for e in engine_list:
Session = sessionmaker(bind=e)
session = Session()
metadata = MetaData(bind=e)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
session.execute(u)
session.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version tables across broker databases.")
parser.add_argument(
'version', help="Version to set the Alembic migration tables to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
<commit_msg>Remove db interfaces from the alembic version reset helper script.
Arguably, this script is no longer especially useful now that we
only have a single database for the broker. That said, removed the
interfaces in case folks are still using it.<commit_after>import argparse
from sqlalchemy import MetaData, Table
from sqlalchemy.sql import update
from dataactcore.interfaces.db import GlobalDB
from dataactvalidator.app import createApp
def reset_alembic(alembic_version):
with createApp().app_context():
db = GlobalDB.db()
engine = db.engine
sess = db.session
metadata = MetaData(bind=engine)
alembic_table = Table('alembic_version', metadata, autoload=True)
u = update(alembic_table)
u = u.values({"version_num": alembic_version})
sess.execute(u)
sess.commit()
parser = argparse.ArgumentParser\
(description="Reset alembic version table.")
parser.add_argument(
'version', help="Version to set the Alembic migration table to.")
v = vars(parser.parse_args())['version']
reset_alembic(v)
|
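The core of the rewritten script is a session-issued UPDATE with no WHERE clause against a reflected table, which rewrites every row — exactly right for the single-row alembic_version table. A standalone sketch of the same pattern against an in-memory SQLite database (assumes SQLAlchemy 1.x to match the bind= style above; the table contents are hypothetical):
from sqlalchemy import Column, MetaData, String, Table, create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.sql import update
engine = create_engine('sqlite://')
metadata = MetaData(bind=engine)
version_table = Table('alembic_version', metadata,
                      Column('version_num', String(32)))
metadata.create_all()
session = sessionmaker(bind=engine)()
session.execute(version_table.insert().values(version_num='abc123'))
# No WHERE clause: every row gets the new version string.
session.execute(update(version_table).values(version_num='def456'))
session.commit()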
14329daf571400812594c0388eac87538cd10079
|
denim/api.py
|
denim/api.py
|
from fabric import api as __api
# Setup some default values.
__api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_package, deploy_path, package_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
@__api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@__api.task
def environment():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
from fabric import api as _api
# Setup some default values.
_api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_application, deploy_path, application_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
# Pending deprecation
from denim.paths import (cd_package, package_path)
@_api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@_api.task
def environments():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
Break out items pending deprecation, remove double underscores
|
Break out items pending deprecation, remove double underscores
|
Python
|
bsd-2-clause
|
timsavage/denim
|
from fabric import api as __api
# Setup some default values.
__api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_package, deploy_path, package_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
@__api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@__api.task
def environment():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
Break out items pending deprecation, remove double underscores
|
from fabric import api as _api
# Setup some default values.
_api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_application, deploy_path, application_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
# Pending deprecation
from denim.paths import (cd_package, package_path)
@_api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@_api.task
def environments():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
<commit_before>from fabric import api as __api
# Setup some default values.
__api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_package, deploy_path, package_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
@__api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@__api.task
def environment():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
<commit_msg>Break out items pending deprecation, remove double underscores<commit_after>
|
from fabric import api as _api
# Setup some default values.
_api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_application, deploy_path, application_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
# Pending deprecation
from denim.paths import (cd_package, package_path)
@_api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@_api.task
def environments():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
from fabric import api as __api
# Setup some default values.
__api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_package, deploy_path, package_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
@__api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@__api.task
def environment():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
Break out items pending deprecation, remove double underscoresfrom fabric import api as _api
# Setup some default values.
_api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_application, deploy_path, application_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
# Pending deprecation
from denim.paths import (cd_package, package_path)
@_api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@_api.task
def environments():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
<commit_before>from fabric import api as __api
# Setup some default values.
__api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_package, deploy_path, package_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
@__api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@__api.task
def environment():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
<commit_msg>Break out items pending deprecation, remove double underscores<commit_after>from fabric import api as _api
# Setup some default values.
_api.env.deploy_user = 'webapps'
from denim.paths import (cd_deploy, cd_application, deploy_path, application_path)
from denim import (scm, service, system, virtualenv, webserver)
from denim.decorators import deploy_env
# Pending deprecation
from denim.paths import (cd_package, package_path)
@_api.task(name="help")
def show_help():
"""
Help on common operations.
"""
from denim.environment import get_environments
import denim
print """
Common operations with Denim (%(version)s).
Provision server:
> fab {%(environments)s} init
Deploy (require a source control revision to be supplied. i.e. master):
> fab {%(environments)s} deploy:{revision}
Status of service:
> fab {%(environments)s} service.status
""" % {
'environments': '|'.join(get_environments()),
'version': denim.__version__,
}
@_api.task
def environments():
"""
Environments defined in fabfile.
"""
from denim.environment import get_environments
print 'Environments defined in fab file:'
print ', '.join(get_environments())
|
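Grouping the old names under a "# Pending deprecation" import keeps them importable while flagging intent. A common follow-up, sketched hypothetically below (this shim is not in denim), is to warn callers who still use the old name:
import warnings
def application_path():
    # Stand-in for the real helper that replaced package_path.
    return '/srv/app'
def package_path(*args, **kwargs):
    # Hypothetical shim: forward to the new name, warning the caller.
    warnings.warn('package_path is deprecated; use application_path',
                  DeprecationWarning, stacklevel=2)
    return application_path(*args, **kwargs)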
770f9dd75a223fb31a18af2fcb089398663f2065
|
concentration.py
|
concentration.py
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
def getConcentrationRequirement(self, string):
return self.requirements[string]
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
Add a getConcentrationRequirement to correspond to getMajorRequirement
|
Add a getConcentrationRequirement to correspond to getMajorRequirement
|
Python
|
agpl-3.0
|
hawkrives/gobbldygook,hawkrives/gobbldygook,hawkrives/gobbldygook
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
Add a getConcentrationRequirement to correspond to getMajorRequirement
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
def getConcentrationRequirement(self, string):
return self.requirements[string]
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
<commit_before>from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
<commit_msg>Add a getConcentrationRequirement to correspond to getMajorRequirement<commit_after>
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
def getConcentrationRequirement(self, string):
return self.requirements[string]
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
Add a getConcentrationRequirement to correspond to getMajorRequirementfrom major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
def getConcentrationRequirement(self, string):
return self.requirements[string]
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
<commit_before>from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
<commit_msg>Add a getConcentrationRequirement to correspond to getMajorRequirement<commit_after>from major import Major
class Concentration(Major):
def __init__(self, dept="NONE"):
super().__init__(dept, path="concentrations/")
def getConcentrationRequirement(self, string):
return self.requirements[string]
if __name__ == '__main__':
tmp = [
Concentration(dept="Asian")
]
for i in tmp:
print(i)
|
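The new accessor is a bare dict lookup, so an unknown requirement name surfaces as an uninformative KeyError. A hypothetical defensive variant (not part of the project) that names the failing key:
class Requirements(object):
    def __init__(self, requirements):
        self.requirements = dict(requirements)
    def get_requirement(self, name):
        try:
            return self.requirements[name]
        except KeyError:
            # Re-raise with context so callers see which name was bad.
            raise KeyError('unknown requirement: %r' % (name,))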
1f61ece6f6b0950706ebef159665eafbfeeaa1fd
|
app/api/utils/containerMapper.py
|
app/api/utils/containerMapper.py
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture')),
'created_at': container.created_at
}
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
image = 'N/A'
if container.config.get('image.os') != None and container.config.get('image.release') != None and container.config.get('image.architecture') != None:
image = ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture'))
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': image,
'created_at': container.created_at
}
|
Fix container list bug when missing image
|
Fix container list bug when missing image
|
Python
|
apache-2.0
|
AdaptiveScale/lxdui,AdaptiveScale/lxdui,AdaptiveScale/lxdui,AdaptiveScale/lxdui
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture')),
'created_at': container.created_at
}Fix container list bug when missing image
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
image = 'N/A'
if container.config.get('image.os') != None and container.config.get('image.release') != None and container.config.get('image.architecture') != None:
image = ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture'))
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': image,
'created_at': container.created_at
}
|
<commit_before>
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture')),
'created_at': container.created_at
}<commit_msg>Fix container list bug when missing image<commit_after>
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
image = 'N/A'
if container.config.get('image.os') != None and container.config.get('image.release') != None and container.config.get('image.architecture') != None:
image = ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture'))
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': image,
'created_at': container.created_at
}
|
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture')),
'created_at': container.created_at
}Fix container list bug when missing image
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
image = 'N/A'
if container.config.get('image.os') != None and container.config.get('image.release') != None and container.config.get('image.architecture') != None:
image = ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture'))
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': image,
'created_at': container.created_at
}
|
<commit_before>
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture')),
'created_at': container.created_at
}<commit_msg>Fix container list bug when missing image<commit_after>
def getContainerDetails(container):
ip = 'N/A'
if container.state().network != None and container.state().network.get('eth0') != None:
if len(container.state().network.get('eth0')['addresses']) > 0:
ip = container.state().network['eth0']['addresses'][0].get('address', 'N/A')
image = 'N/A'
if container.config.get('image.os') != None and container.config.get('image.release') != None and container.config.get('image.architecture') != None:
image = ''.join(container.config.get('image.os') + ' ' + container.config.get('image.release') + ' ' + container.config.get('image.architecture'))
return {
'name': container.name,
'status': container.status,
'ip': ip,
'ephemeral': container.ephemeral,
'image': image,
'created_at': container.created_at
}
|
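The fix above guards the image string with three chained != None checks before joining the parts. An equivalent, slightly tighter sketch using all() and the idiomatic "is not None" (format_image is a hypothetical helper with the same behaviour):
def format_image(config):
    keys = ('image.os', 'image.release', 'image.architecture')
    values = [config.get(k) for k in keys]
    if all(v is not None for v in values):
        return ' '.join(values)
    return 'N/A'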
26538b01b5865c4b5f68fc0ea0f22268d4768376
|
scrapy/contrib/memdebug.py
|
scrapy/contrib/memdebug.py
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
def engine_stopped(self):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_closed(self, spider, reason):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage), spider=spider)
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict), spider=spider)
|
Fix logging of stats collected by MemoryDebugger extension.
|
Fix logging of stats collected by MemoryDebugger extension.
Stats are printed on the spider_closed event; the
engine_stopped signal is called after the spider_closed signal,
so stats for the MemoryDebugger extension were not printed to the user.
|
Python
|
bsd-3-clause
|
olorz/scrapy,OpenWhere/scrapy,cursesun/scrapy,pablohoffman/scrapy,avtoritet/scrapy,nikgr95/scrapy,eLRuLL/scrapy,rolando-contrib/scrapy,amboxer21/scrapy,aivarsk/scrapy,zjuwangg/scrapy,dacjames/scrapy,cursesun/scrapy,TarasRudnyk/scrapy,Digenis/scrapy,bmess/scrapy,agreen/scrapy,CENDARI/scrapy,redapple/scrapy,shaform/scrapy,lacrazyboy/scrapy,fontenele/scrapy,scorphus/scrapy,JacobStevenR/scrapy,CENDARI/scrapy,beni55/scrapy,zackslash/scrapy,liyy7/scrapy,rdowinton/scrapy,fontenele/scrapy,bmess/scrapy,smaty1/scrapy,haiiiiiyun/scrapy,huoxudong125/scrapy,rolando-contrib/scrapy,mlyundin/scrapy,Timeship/scrapy,amboxer21/scrapy,fqul/scrapy,stenskjaer/scrapy,carlosp420/scrapy,jdemaeyer/scrapy,legendtkl/scrapy,kimimj/scrapy,ashishnerkar1/scrapy,profjrr/scrapy,gbirke/scrapy,kashyap32/scrapy,xiao26/scrapy,foromer4/scrapy,zjuwangg/scrapy,jc0n/scrapy,jiezhu2007/scrapy,darkrho/scrapy-scrapy,Allianzcortex/scrapy,Digenis/scrapy,zhangtao11/scrapy,elacuesta/scrapy,devGregA/scrapy,TarasRudnyk/scrapy,Lucifer-Kim/scrapy,YeelerG/scrapy,eLRuLL/scrapy,KublaikhanGeek/scrapy,cleydson/scrapy,huoxudong125/scrapy,raphaelfruneaux/scrapy,nikgr95/scrapy,GregoryVigoTorres/scrapy,carlosp420/scrapy,wzyuliyang/scrapy,nguyenhongson03/scrapy,IvanGavran/scrapy,jdemaeyer/scrapy,Bourneer/scrapy,gbirke/scrapy,xiao26/scrapy,haiiiiiyun/scrapy,elijah513/scrapy,songfj/scrapy,olafdietsche/scrapy,ssteo/scrapy,jiezhu2007/scrapy,nguyenhongson03/scrapy,KublaikhanGeek/scrapy,redapple/scrapy,jorik041/scrapy,zjuwangg/scrapy,1yvT0s/scrapy,Geeglee/scrapy,olorz/scrapy,agusc/scrapy,coderabhishek/scrapy,webmakin/scrapy,agreen/scrapy,CodeJuan/scrapy,yarikoptic/scrapy,hectoruelo/scrapy,pranjalpatil/scrapy,nikgr95/scrapy,AaronTao1990/scrapy,pombredanne/scrapy,pablohoffman/scrapy,rdowinton/scrapy,Slater-Victoroff/scrapy,ylcolala/scrapy,cyberplant/scrapy,lacrazyboy/scrapy,Partoo/scrapy,irwinlove/scrapy,dgillis/scrapy,pombredanne/scrapy,Bourneer/scrapy,taito/scrapy,dracony/scrapy,jeffreyjinfeng/scrapy,chekunkov/scrapy,Cnfc19932/scrapy,csalazar/scrapy,fafaman/scrapy,rdowinton/scrapy,tagatac/scrapy,crasker/scrapy,kazitanvirahsan/scrapy,Parlin-Galanodel/scrapy,hyrole/scrapy,TarasRudnyk/scrapy,mgedmin/scrapy,wenyu1001/scrapy,hansenDise/scrapy,snowdream1314/scrapy,tntC4stl3/scrapy,liyy7/scrapy,ssh-odoo/scrapy,irwinlove/scrapy,ramiro/scrapy,webmakin/scrapy,profjrr/scrapy,huoxudong125/scrapy,URXtech/scrapy,raphaelfruneaux/scrapy,dangra/scrapy,IvanGavran/scrapy,z-fork/scrapy,foromer4/scrapy,stenskjaer/scrapy,wujuguang/scrapy,curita/scrapy,joshlk/scrapy,OpenWhere/scrapy,wangjun/scrapy,jdemaeyer/scrapy,ylcolala/scrapy,sardok/scrapy,rklabs/scrapy,livepy/scrapy,finfish/scrapy,dangra/scrapy,famorted/scrapy,songfj/scrapy,pranjalpatil/scrapy,zackslash/scrapy,kalessin/scrapy,hectoruelo/scrapy,olorz/scrapy,Timeship/scrapy,tagatac/scrapy,smaty1/scrapy,farhan0581/scrapy,mgedmin/scrapy,kazitanvirahsan/scrapy,rootAvish/scrapy,finfish/scrapy,jeffreyjinfeng/scrapy,snowdream1314/scrapy,CENDARI/scrapy,elacuesta/scrapy,codebhendi/scrapy,hwsyy/scrapy,ramiro/scrapy,fpy171/scrapy,tliber/scrapy,darkrho/scrapy-scrapy,umrashrf/scrapy,foromer4/scrapy,fpy171/scrapy,darkrho/scrapy-scrapy,dacjames/scrapy,mlyundin/scrapy,wzyuliyang/scrapy,johnardavies/scrapy,mlyundin/scrapy,xiao26/scrapy,AaronTao1990/scrapy,1yvT0s/scrapy,AaronTao1990/scrapy,bmess/scrapy,snowdream1314/scrapy,johnardavies/scrapy,dracony/scrapy,shaform/scrapy,stenskjaer/scrapy,Djlavoy/scrapy,pawelmhm/scrapy,cleydson/scrapy,webmakin/scrapy,kashyap32/scrapy,moraesnicol/scrapy,olafdietsche/scrapy,hbwzhsh/scrapy,GregoryVigoTorres/scrapy,Zephor5/scrapy,cyrixhero/scrapy,barraponto/scrapy,scrapy/scrapy,Bourneer/scrapy,URXtech/scrapy,livepy/scrapy,arush0311/scrapy,IvanGavran/scrapy,elacuesta/scrapy,ArturGaspar/scrapy,profjrr/scrapy,zorojean/scrapy,OpenWhere/scrapy,coderabhishek/scrapy,shaform/scrapy,famorted/scrapy,finfish/scrapy,umrashrf/scrapy,farhan0581/scrapy,Timeship/scrapy,ENjOyAbLE1991/scrapy,carlosp420/scrapy,Zephor5/scrapy,eliasdorneles/scrapy,devGregA/scrapy,jamesblunt/scrapy,z-fork/scrapy,rahulsharma1991/scrapy,ENjOyAbLE1991/scrapy,pombredanne/scrapy,kazitanvirahsan/scrapy,wujuguang/scrapy,Adai0808/scrapy-1,chekunkov/scrapy,github-account-because-they-want-it/scrapy,Chenmxs/scrapy,godfreyy/scrapy,WilliamKinaan/scrapy,moraesnicol/scrapy,JacobStevenR/scrapy,dhenyjarasandy/scrapy,ssteo/scrapy,nfunato/scrapy,heamon7/scrapy,curita/scrapy,zackslash/scrapy,z-fork/scrapy,kimimj/scrapy,hbwzhsh/scrapy,sigma-random/scrapy,ylcolala/scrapy,dhenyjarasandy/scrapy,tntC4stl3/scrapy,w495/scrapy,devGregA/scrapy,Adai0808/scrapy-1,rklabs/scrapy,agreen/scrapy,codebhendi/scrapy,olafdietsche/scrapy,hectoruelo/scrapy,csalazar/scrapy,YeelerG/scrapy,kashyap32/scrapy,ArturGaspar/scrapy,scorphus/scrapy,CodeJuan/scrapy,rolando/scrapy,heamon7/scrapy,cyberplant/scrapy,KublaikhanGeek/scrapy,Adai0808/scrapy-1,tntC4stl3/scrapy,nowopen/scrapy,rahulsharma1991/scrapy,lacrazyboy/scrapy,yidongliu/scrapy,ENjOyAbLE1991/scrapy,YeelerG/scrapy,haiiiiiyun/scrapy,nfunato/scrapy,joshlk/scrapy,nowopen/scrapy,starrify/scrapy,cleydson/scrapy,cyrixhero/scrapy,ssh-odoo/scrapy,yusofm/scrapy,w495/scrapy,hansenDise/scrapy,cyberplant/scrapy,songfj/scrapy,coderabhishek/scrapy,hansenDise/scrapy,CodeJuan/scrapy,kalessin/scrapy,liyy7/scrapy,Allianzcortex/scrapy,jorik041/scrapy,johnardavies/scrapy,yidongliu/scrapy,ramiro/scrapy,eliasdorneles/scrapy,fqul/scrapy,taito/scrapy,joshlk/scrapy,nfunato/scrapy,smaty1/scrapy,pfctdayelise/scrapy
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
def engine_stopped(self):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
Fix logging of stats collected by MemoryDebugger extension.
Stats are printed on spider_closed event;
engine_stopped signal is called after spider_closed signal,
so stats for MemoryDebugger extension were not printed to user.
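As illustration, a minimal toy extension (a sketch assuming the scrapy signal API of this era, not part of the commit) showing the ordering that motivates the fix:
from scrapy import signals
class SignalOrderDemo(object):
    """Toy extension: per-spider stats must be set in spider_closed."""
    @classmethod
    def from_crawler(cls, crawler):
        o = cls()
        # spider_closed fires first, while the per-spider stats dump is still pending
        crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
        # engine_stopped fires afterwards, once the stats have already been printed
        crawler.signals.connect(o.engine_stopped, signal=signals.engine_stopped)
        return o
    def spider_closed(self, spider, reason):
        print("spider_closed: stats set here reach the dump")
    def engine_stopped(self):
        print("engine_stopped: too late for the per-spider dump")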
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_closed(self, spider, reason):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage), spider=spider)
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict), spider=spider)
|
<commit_before>"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
def engine_stopped(self):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
<commit_msg>Fix logging of stats collected by MemoryDebugger extension.
Stats are printed on spider_closed event;
engine_stopped signal is called after spider_closed signal,
so stats for MemoryDebugger extension were not printed to user.<commit_after>
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_closed(self, spider, reason):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage), spider=spider)
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict), spider=spider)
|
"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
def engine_stopped(self):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
Fix logging of stats collected by MemoryDebugger extension.
Stats are printed on spider_closed event;
engine_stopped signal is called after spider_closed signal,
so stats for MemoryDebugger extension were not printed to user."""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_closed(self, spider, reason):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage), spider=spider)
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict), spider=spider)
|
<commit_before>"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.engine_stopped, signals.engine_stopped)
return o
def engine_stopped(self):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage))
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict))
<commit_msg>Fix logging of stats collected by MemoryDebugger extension.
Stats are printed on spider_closed event;
engine_stopped signal is called after spider_closed signal,
so stats for MemoryDebugger extension were not printed to user.<commit_after>"""
MemoryDebugger extension
See documentation in docs/topics/extensions.rst
"""
import gc
from scrapy import signals
from scrapy.exceptions import NotConfigured
from scrapy.utils.trackref import live_refs
class MemoryDebugger(object):
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
if not crawler.settings.getbool('MEMDEBUG_ENABLED'):
raise NotConfigured
o = cls(crawler.stats)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_closed(self, spider, reason):
gc.collect()
self.stats.set_value('memdebug/gc_garbage_count', len(gc.garbage), spider=spider)
for cls, wdict in live_refs.iteritems():
if not wdict:
continue
self.stats.set_value('memdebug/live_refs/%s' % cls.__name__, len(wdict), spider=spider)
|
7fc576f3dd4d8d7dbe64dbecfc6dcc9ac9ad6b12
|
conman/routes/utils.py
|
conman/routes/utils.py
|
import os
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
"""
paths = ['/']
path = path.rstrip('/')
while path:
paths.insert(1, path + '/')
path = os.path.split(path)[0]
if path == '/':
break
return paths
|
from collections import deque
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
eg: /path/containing/subpaths/ becomes:
/
/path/
/path/containing/
/path/containing/subpaths/
"""
paths = deque()
path = path or '/'
while path:
path = path.rpartition('/')[0]
paths.appendleft(path + '/')
return list(paths)
|
Refactor split_path code for brevity and clarity
|
Refactor split_path code for brevity and clarity
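A quick doctest-style usage sketch of the refactored helper, matching the example in its docstring:
>>> split_path('/path/containing/subpaths/')
['/', '/path/', '/path/containing/', '/path/containing/subpaths/']
>>> split_path('')  # falsy input is normalised to the root
['/']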
|
Python
|
bsd-2-clause
|
meshy/django-conman,meshy/django-conman
|
import os
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
"""
paths = ['/']
path = path.rstrip('/')
while path:
paths.insert(1, path + '/')
path = os.path.split(path)[0]
if path == '/':
break
return paths
Refactor split_path code for brevity and clarity
|
from collections import deque
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
eg: /path/containing/subpaths/ becomes:
/
/path/
/path/containing/
/path/containing/subpaths/
"""
paths = deque()
path = path or '/'
while path:
path = path.rpartition('/')[0]
paths.appendleft(path + '/')
return list(paths)
|
<commit_before>import os
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
"""
paths = ['/']
path = path.rstrip('/')
while path:
paths.insert(1, path + '/')
path = os.path.split(path)[0]
if path == '/':
break
return paths
<commit_msg>Refactor split_path code for brevity and clarity<commit_after>
|
from collections import deque
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
eg: /path/containing/subpaths/ becomes:
/
/path/
/path/containing/
/path/containing/subpaths/
"""
paths = deque()
path = path or '/'
while path:
path = path.rpartition('/')[0]
paths.appendleft(path + '/')
return list(paths)
|
import os
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
"""
paths = ['/']
path = path.rstrip('/')
while path:
paths.insert(1, path + '/')
path = os.path.split(path)[0]
if path == '/':
break
return paths
Refactor split_path code for brevity and clarityfrom collections import deque
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
eg: /path/containing/subpaths/ becomes:
/
/path/
/path/containing/
/path/containing/subpaths/
"""
paths = deque()
path = path or '/'
while path:
path = path.rpartition('/')[0]
paths.appendleft(path + '/')
return list(paths)
|
<commit_before>import os
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
"""
paths = ['/']
path = path.rstrip('/')
while path:
paths.insert(1, path + '/')
path = os.path.split(path)[0]
if path == '/':
break
return paths
<commit_msg>Refactor split_path code for brevity and clarity<commit_after>from collections import deque
def split_path(path):
"""
Split a url path into its sub-paths.
A url's sub-paths consist of all substrings ending in / and starting at
the start of the url.
eg: /path/containing/subpaths/ becomes:
/
/path/
/path/containing/
/path/containing/subpaths/
"""
paths = deque()
path = path or '/'
while path:
path = path.rpartition('/')[0]
paths.appendleft(path + '/')
return list(paths)
|
15c773250b52a03196a023e286f4f3a2405ba94e
|
backend/uclapi/dashboard/app_helpers.py
|
backend/uclapi/dashboard/app_helpers.py
|
from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
|
from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
    # random.choice accepts no k argument (random.choices is Python 3.6+),
    # so draw one digit at a time with the imported choice()
    client_id = ''.join(choice(string.digits) for _ in range(16))
    client_id += "."
    client_id += ''.join(choice(string.digits) for _ in range(16))
    return client_id
def generate_app_client_secret():
    client_secret = ''.join(
        choice(string.ascii_lowercase + string.digits) for _ in range(64))
    return client_secret
|
Add helpers to the dashboard code to generate OAuth keys
|
Add helpers to the dashboard code to generate OAuth keys
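Note that random.choice picks a single element and accepts no k keyword (random.choices, the batch variant, only arrived in Python 3.6), so the tokens have to be built one draw at a time with the imported choice(). For credentials, a cryptographically secure source is generally preferable; a minimal sketch assuming Python 3.6+ and the standard-library secrets module:
import secrets
import string
def generate_app_client_secret():
    # secrets.choice draws from os.urandom rather than the random module's PRNG
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(64))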
|
Python
|
mit
|
uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi
|
from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
Add helpers to the dashboard code to generate OAuth keys
|
from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
    # random.choice accepts no k argument (random.choices is Python 3.6+),
    # so draw one digit at a time with the imported choice()
    client_id = ''.join(choice(string.digits) for _ in range(16))
    client_id += "."
    client_id += ''.join(choice(string.digits) for _ in range(16))
    return client_id
def generate_app_client_secret():
    client_secret = ''.join(
        choice(string.ascii_lowercase + string.digits) for _ in range(64))
    return client_secret
|
<commit_before>from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
<commit_msg>Add helpers to the dashboard code to generate OAuth keys<commit_after>
|
from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
    # random.choice accepts no k argument (random.choices is Python 3.6+),
    # so draw one digit at a time with the imported choice()
    client_id = ''.join(choice(string.digits) for _ in range(16))
    client_id += "."
    client_id += ''.join(choice(string.digits) for _ in range(16))
    return client_id
def generate_app_client_secret():
    client_secret = ''.join(
        choice(string.ascii_lowercase + string.digits) for _ in range(64))
    return client_secret
|
from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
Add helpers to the dashboard code to generate OAuth keysfrom binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
    # random.choice accepts no k argument (random.choices is Python 3.6+),
    # so draw one digit at a time with the imported choice()
    client_id = ''.join(choice(string.digits) for _ in range(16))
    client_id += "."
    client_id += ''.join(choice(string.digits) for _ in range(16))
    return client_id
def generate_app_client_secret():
    client_secret = ''.join(
        choice(string.ascii_lowercase + string.digits) for _ in range(64))
    return client_secret
|
<commit_before>from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
<commit_msg>Add helpers to the dashboard code to generate OAuth keys<commit_after>from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
    # random.choice accepts no k argument (random.choices is Python 3.6+),
    # so draw one digit at a time with the imported choice()
    client_id = ''.join(choice(string.digits) for _ in range(16))
    client_id += "."
    client_id += ''.join(choice(string.digits) for _ in range(16))
    return client_id
def generate_app_client_secret():
    client_secret = ''.join(
        choice(string.ascii_lowercase + string.digits) for _ in range(64))
    return client_secret
|
5bb4c61e9950de4c8c000a4ab02b0c901e0b06ff
|
version.py
|
version.py
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import imp
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
module = imp.load_source("verfile", version_file)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import importlib.util
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
        # importlib replacement for imp.load_source (Python 3.5+)
        spec = importlib.util.spec_from_file_location("verfile", version_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
Migrate from deprecated imp to importlib
|
Migrate from deprecated imp to importlib
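imp.load_source has no one-line equivalent in importlib (importlib exposes import_module, but no load_module function); the documented replacement on Python 3.5+ goes through importlib.util, as in this minimal sketch:
import importlib.util
def load_source(name, path):
    # spec -> module -> exec: the importlib.util replacement for imp.load_source
    spec = importlib.util.spec_from_file_location(name, path)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module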
|
Python
|
apache-2.0
|
aiven/aiven-client
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import imp
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
module = imp.load_source("verfile", version_file)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
Migrate from deprecated imp to importlib
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import importlib.util
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
        # importlib replacement for imp.load_source (Python 3.5+)
        spec = importlib.util.spec_from_file_location("verfile", version_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
<commit_before>"""
automatically maintains the latest git tag + revision info in a python file
"""
import imp
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
module = imp.load_source("verfile", version_file)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
<commit_msg>Migrate from deprecated imp to importlib<commit_after>
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import importlib.util
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
        # importlib replacement for imp.load_source (Python 3.5+)
        spec = importlib.util.spec_from_file_location("verfile", version_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
"""
automatically maintains the latest git tag + revision info in a python file
"""
import imp
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
module = imp.load_source("verfile", version_file)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
Migrate from deprecated imp to importlib"""
automatically maintains the latest git tag + revision info in a python file
"""
import importlib.util
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
        # importlib replacement for imp.load_source (Python 3.5+)
        spec = importlib.util.spec_from_file_location("verfile", version_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
<commit_before>"""
automatically maintains the latest git tag + revision info in a python file
"""
import imp
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
module = imp.load_source("verfile", version_file)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
<commit_msg>Migrate from deprecated imp to importlib<commit_after>"""
automatically maintains the latest git tag + revision info in a python file
"""
import importlib.util
import os
import subprocess
def get_project_version(version_file):
version_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), version_file)
try:
        # importlib replacement for imp.load_source (Python 3.5+)
        spec = importlib.util.spec_from_file_location("verfile", version_file)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
file_ver = module.__version__
except: # pylint: disable=bare-except
file_ver = None
try:
proc = subprocess.Popen(["git", "describe", "--always"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = proc.communicate()
if stdout:
git_ver = stdout.splitlines()[0].strip().decode("utf-8")
if git_ver and ((git_ver != file_ver) or not file_ver):
open(version_file, "w").write("__version__ = '%s'\n" % git_ver)
return git_ver
except OSError:
pass
if not file_ver:
raise Exception("version not available from git or from file %r"
% version_file)
return file_ver
if __name__ == "__main__":
import sys
get_project_version(sys.argv[1])
|
e799c94932aa3a9284d99739502645abc47aa8b4
|
docs/conf.py
|
docs/conf.py
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
Add package version to rst_prolog.
|
Add package version to rst_prolog.
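For reference, rst_prolog is a standard Sphinx setting: a string of reStructuredText prepended to every source file before parsing, so the banner lands at the top of each rendered page. An illustrative sketch with a hypothetical release value:
# conf.py (illustration only)
release = '1.2.3'  # hypothetical version
rst_prolog = """
**Version**: {}
""".format(release)
# every .rst file is then parsed as if it began with the line:
#   **Version**: 1.2.3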
|
Python
|
bsd-3-clause
|
STIXProject/stix-validator,pombredanne/stix-validator
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
Add package version to rst_prolog.
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
<commit_before>import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Add package version to rst_prolog.<commit_after>
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
Add package version to rst_prolog.import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
<commit_before>import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Add package version to rst_prolog.<commit_after>import os
import sdv
project = u'stix-validator'
copyright = u'2015, The MITRE Corporation'
version = sdv.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinxcontrib.napoleon',
]
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = ['_build']
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
html_show_sourcelink = False
htmlhelp_basename = 'stix-validatordoc'
latex_elements = {}
latex_documents = [
('index', 'stix-validator.tex', u'stix-validator Documentation',
u'The MITRE Corporation', 'manual'),
]
|
80002186ecec3a50d13d04e8467d8d7456d189b5
|
webdriver/webdriver-w3c-tests/base_test.py
|
webdriver/webdriver-w3c-tests/base_test.py
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkDriverTestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkdrivertestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
Update activity name for crosswalk android package rule changed
|
[xwalkdriver] Update activity name for crosswalk android package rule changed
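The new name tracks how Crosswalk's packaging appears to derive the generated activity class: plain first-letter capitalization of the project name rather than camel case. A toy illustration (hypothetical helper, not part of the suite):
def activity_for(project_name):
    # 'xwalkdrivertest' -> '.XwalkdrivertestActivity', not '.XwalkDriverTestActivity'
    return '.{}Activity'.format(project_name.capitalize())
assert activity_for('xwalkdrivertest') == '.XwalkdrivertestActivity'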
|
Python
|
bsd-3-clause
|
YongseopKim/crosswalk-test-suite,kangxu/crosswalk-test-suite,jacky-young/crosswalk-test-suite,kangxu/crosswalk-test-suite,ibelem/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,kangxu/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,Honry/crosswalk-test-suite,zqzhang/crosswalk-test-suite,ibelem/crosswalk-test-suite,BruceDai/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,jacky-young/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,zqzhang/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,Honry/crosswalk-test-suite,jiajiax/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,ibelem/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,pk-sam/crosswalk-test-suite,yhe39/crosswalk-test-suite,chunywang/crosswalk-test-suite,kangxu/crosswalk-test-suite,yhe39/crosswalk-test-suite,BruceDai/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,chunywang/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,chunywang/crosswalk-test-suite,zqzhang/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,jacky-young/crosswalk-test-suite,yunxliu/crosswalk-test-suite,Honry/crosswalk-test-suite,pk-sam/crosswalk-test-suite,yunxliu/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,jacky-young/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,kangxu/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,yugang/crosswalk-test-suite,BruceDai/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,zqzhang/crosswalk-test-suite,haoxli/crosswalk-test-suite,pk-sam/crosswalk-test-suite,haoxli/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,Honry/crosswalk-test-suite,ibelem/crosswalk-test-suite,zqzhang/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,ibelem/crosswalk-test-suite,Honry/crosswalk-test-suite,jiajiax/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,BruceDai/crosswalk-test-suite,haoxli/crosswalk-test-suite,yhe39/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,ibelem/crosswalk-test-suite,chunywang/crosswalk-test-suite,yugang/crosswalk-test-suite,kangxu/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,yugang/crosswalk-test-suite,jiajiax/crosswalk-test-suite,haoxli/crosswalk-test-suite,zqzhang/crosswalk-test-suite,yunxliu/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,ibelem/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,yhe39/crosswalk-test-suite,Honry/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,yhe39/crosswalk-test-suite,Honry/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,yhe39/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,yunxliu/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,haoxli/crosswalk-test-sui
te,qiuzhong/crosswalk-test-suite,yugang/crosswalk-test-suite,BruceDai/crosswalk-test-suite,yunxliu/crosswalk-test-suite,jiajiax/crosswalk-test-suite,haoxli/crosswalk-test-suite,jacky-young/crosswalk-test-suite,ibelem/crosswalk-test-suite,BruceDai/crosswalk-test-suite,yugang/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,pk-sam/crosswalk-test-suite,yugang/crosswalk-test-suite,zqzhang/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,chunywang/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,haoyunfeix/crosswalk-test-suite,yugang/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,crosswalk-project/crosswalk-test-suite,xiaojunwu/crosswalk-test-suite,zhuyongyong/crosswalk-test-suite,kangxu/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,jiajiax/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,BruceDai/crosswalk-test-suite,pk-sam/crosswalk-test-suite,chunywang/crosswalk-test-suite,chunywang/crosswalk-test-suite,jacky-young/crosswalk-test-suite,haoxli/crosswalk-test-suite,jiajiax/crosswalk-test-suite,YongseopKim/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,jiajiax/crosswalk-test-suite,yhe39/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,wanghongjuan/crosswalk-test-suite,yunxliu/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,Honry/crosswalk-test-suite,Shao-Feng/crosswalk-test-suite,chunywang/crosswalk-test-suite,pk-sam/crosswalk-test-suite,kangxu/crosswalk-test-suite,yunxliu/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,XiaosongWei/crosswalk-test-suite,yhe39/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,kaixinjxq/crosswalk-test-suite,zqzhang/crosswalk-test-suite,haoxli/crosswalk-test-suite,BruceDai/crosswalk-test-suite,JianfengXu/crosswalk-test-suite,yunxliu/crosswalk-test-suite,qiuzhong/crosswalk-test-suite,pk-sam/crosswalk-test-suite
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkDriverTestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
[xwalkdriver] Update activity name for crosswalk android package rule changed
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkdrivertestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
<commit_before>
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkDriverTestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
<commit_msg>[xwalkdriver] Update activity name for crosswalk android package rule changed<commit_after>
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkdrivertestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkDriverTestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
[xwalkdriver] Update activity name for crosswalk android package rule changed
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkdrivertestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
<commit_before>
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkDriverTestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
<commit_msg>[xwalkdriver] Update activity name for crosswalk android package rule changed<commit_after>
import ConfigParser
import json
import os
import unittest
from webserver import Httpd
from network import get_lan_ip
from selenium import webdriver
class WebDriverBaseTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.driver = create_driver()
cls.webserver = Httpd(host=get_lan_ip())
cls.webserver.start()
@classmethod
def tearDownClass(cls):
cls.webserver.stop()
if cls.driver:
cls.driver.quit()
def create_driver():
capabilities = {
'xwalkOptions': {
'androidPackage': 'org.xwalk.xwalkdrivertest',
'androidActivity': '.XwalkdrivertestActivity',
}
}
return webdriver.Remote('http://localhost:9515', capabilities)
|
d641d7d843899258d88da0d1dffaa762c1378712
|
opps/fields/widgets.py
|
opps/fields/widgets.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, Option, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
values = json.loads(value)
objs = self.model.objects.all()
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
Fix bug TypeError, not exist values (json) is dict None
|
Fix bug TypeError, not exist values (json) is dict None
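The failure is easy to reproduce: json.loads raises TypeError when handed None, which is exactly what the widget's value is before the field has ever been populated. A minimal demonstration of the guard:
import json
def parse_values(value):
    # json.loads(None) raises TypeError, so fall back to an empty dict
    try:
        return json.loads(value)
    except TypeError:
        return {}
assert parse_values(None) == {}
assert parse_values('{"a": 1}') == {'a': 1}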
|
Python
|
mit
|
williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,opps/opps,YACOWS/opps,opps/opps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, Option, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
values = json.loads(value)
objs = self.model.objects.all()
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
Fix bug TypeError, not exist values (json) is dict None
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, Option, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
values = json.loads(value)
objs = self.model.objects.all()
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
<commit_msg>Fix bug TypeError, not exist values (json) is dict None<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, Option, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
values = json.loads(value)
objs = self.model.objects.all()
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
Fix bug TypeError, not exist values (json) is dict None#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, Option, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
values = json.loads(value)
objs = self.model.objects.all()
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
<commit_msg>Fix bug TypeError, not exist values (json) is dict None<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*
import json
from django import forms
from django.template.loader import render_to_string
from .models import Field, FieldOption
class JSONField(forms.TextInput):
model = Field
def render(self, name, value, attrs=None):
elements = []
try:
values = json.loads(value)
except TypeError:
values = {}
objs = self.model.objects.filter(
application__contains=self.attrs.get('_model', None))
for obj in objs:
o = {}
o['name'] = obj.name
o['slug'] = obj.slug
element_attr = {}
element_attr['name'] = obj.name
element_attr['slug'] = obj.slug
"""
element_attr['value'] = '1'
element_attr['obj_value'] = values.get(obj.slug, '')
"""
if obj.type in ["checkbox", "radiobox"]:
obj_value = []
fo = FieldOption.objects.filter(field=obj)
for i in fo:
key = "{}_{}".format(obj.slug, i.option.slug)
obj_value.append(values.get(key, ''))
element_attr['list'] = zip(fo, obj_value)
o['element'] = render_to_string(
"admin/opps/fields/json_{}.html".format(obj.type),
dictionary=element_attr
)
elements.append(o)
return render_to_string(
"admin/opps/fields/json.html",
{"elements": elements,
"name": name,
"value": value})
|
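A minimal sketch of the failure this record fixes, assuming nothing beyond the standard library: json.loads() raises TypeError when the stored value is None (i.e. no JSON has ever been saved for the field), which is why the widget now falls back to an empty dict.

import json

value = None  # what the widget receives before any JSON has been stored
try:
    values = json.loads(value)
except TypeError:  # json.loads(None) raises TypeError, not ValueError
    values = {}
print(values)  # {}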
ea57d89c1acc82a473a648f1c53430fadc27f7b2
|
opps/polls/__init__.py
|
opps/polls/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, YACOWS"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Projects"
|
Fix Copyright application, YACOWS to Opps Projects
|
Fix Copyright application, YACOWS to Opps Projects
|
Python
|
mit
|
opps/opps-polls,opps/opps-polls
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, YACOWS"
Fix Copyright application, YACOWS to Opps Projects
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Projects"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, YACOWS"
<commit_msg>Fix Copyright application, YACOWS to Opps Projects<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Projects"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, YACOWS"
Fix Copyright application, YACOWS to Opps Projects#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Projects"
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, YACOWS"
<commit_msg>Fix Copyright application, YACOWS to Opps Projects<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
VERSION = (0, 1, 4)
__version__ = ".".join(map(str, VERSION))
__status__ = "Development"
__description__ = u"Poll App for Opps CMS"
__author__ = u"Bruno Cezar Rocha"
__credits__ = []
__email__ = u"rochacbruno@gmail.com"
__license__ = u"MIT License"
__copyright__ = u"Copyright 2013, Opps Projects"
|
e2282e8832bcf1f2621dc56488fec5eb9f1b1acc
|
src/sentry/api/endpoints/group_events.py
|
src/sentry/api/endpoints/group_events.py
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__icontains=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
Make event search actually useful
|
Make event search actually useful
|
Python
|
bsd-3-clause
|
looker/sentry,zenefits/sentry,zenefits/sentry,zenefits/sentry,jean/sentry,mvaled/sentry,ifduyue/sentry,beeftornado/sentry,beeftornado/sentry,JamesMura/sentry,fotinakis/sentry,looker/sentry,alexm92/sentry,beeftornado/sentry,looker/sentry,alexm92/sentry,JamesMura/sentry,JackDanger/sentry,mvaled/sentry,jean/sentry,JackDanger/sentry,gencer/sentry,daevaorn/sentry,jean/sentry,ifduyue/sentry,JamesMura/sentry,nicholasserra/sentry,zenefits/sentry,BuildingLink/sentry,ifduyue/sentry,nicholasserra/sentry,mvaled/sentry,mvaled/sentry,mitsuhiko/sentry,looker/sentry,gencer/sentry,daevaorn/sentry,BuildingLink/sentry,JamesMura/sentry,gencer/sentry,ifduyue/sentry,nicholasserra/sentry,jean/sentry,JackDanger/sentry,gencer/sentry,mitsuhiko/sentry,fotinakis/sentry,gencer/sentry,alexm92/sentry,fotinakis/sentry,zenefits/sentry,jean/sentry,fotinakis/sentry,mvaled/sentry,looker/sentry,daevaorn/sentry,BuildingLink/sentry,ifduyue/sentry,mvaled/sentry,BuildingLink/sentry,daevaorn/sentry,JamesMura/sentry,BuildingLink/sentry
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
Make event search actually useful
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__icontains=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
<commit_before>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
<commit_msg>Make event search actually useful<commit_after>
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__icontains=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
Make event search actually usefulfrom __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__icontains=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
<commit_before>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__iexact=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
<commit_msg>Make event search actually useful<commit_after>from __future__ import absolute_import
from sentry.api.base import DocSection
from sentry.api.bases import GroupEndpoint
from sentry.api.serializers import serialize
from sentry.api.paginator import DateTimePaginator
from sentry.models import Event, Group
from sentry.utils.apidocs import scenario, attach_scenarios
@scenario('ListAvailableSamples')
def list_available_samples_scenario(runner):
group = Group.objects.filter(project=runner.default_project).first()
runner.request(
method='GET',
path='/issues/%s/events/' % group.id
)
class GroupEventsEndpoint(GroupEndpoint):
doc_section = DocSection.EVENTS
@attach_scenarios([list_available_samples_scenario])
def get(self, request, group):
"""
List an Issue's Events
``````````````````````
This endpoint lists an issue's events.
:pparam string issue_id: the ID of the issue to retrieve.
:auth: required
"""
events = Event.objects.filter(
group=group
)
query = request.GET.get('query')
if query:
events = events.filter(
message__icontains=query,
)
return self.paginate(
request=request,
queryset=events,
order_by='-datetime',
on_results=lambda x: serialize(x, request.user),
paginator_cls=DateTimePaginator,
)
|
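A plain-Python analogue of the lookup change above, hedged because it only mimics the two Django operators rather than running the ORM: __iexact demands a case-insensitive match of the whole message, so searching "timeout" misses "Connection timeout after 30s", while __icontains performs a case-insensitive substring test and finds it.

message = "Connection timeout after 30s"
query = "timeout"

iexact_hit = message.lower() == query.lower()     # whole-string comparison
icontains_hit = query.lower() in message.lower()  # substring comparison
print(iexact_hit, icontains_hit)  # False True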
ebfeba2704dc73c136fa2ed217ef4337265b92dd
|
addie/utilities/__init__.py
|
addie/utilities/__init__.py
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# directory containing this file
filename = __file__
if not os.path.isdir(filename):
filename = os.path.split(filename)[0]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(filename, '..', '..', 'designer')
# put together the full path to the ui file
filename = os.path.join(filename, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
Fix path for designer directory with ui files
|
Fix path for designer directory with ui files
|
Python
|
mit
|
neutrons/FastGR,neutrons/FastGR,neutrons/FastGR
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# directory containing this file
filename = __file__
if not os.path.isdir(filename):
filename = os.path.split(filename)[0]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(filename, '..', '..', 'designer')
# put together the full path to the ui file
filename = os.path.join(filename, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
Fix path for designer directory with ui files
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
<commit_before>import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# directory containing this file
filename = __file__
if not os.path.isdir(filename):
filename = os.path.split(filename)[0]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(filename, '..', '..', 'designer')
# put together the full path to the ui file
filename = os.path.join(filename, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
<commit_msg>Fix path for designer directory with ui files<commit_after>
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# directory containing this file
filename = __file__
if not os.path.isdir(filename):
filename = os.path.split(filename)[0]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(filename, '..', '..', 'designer')
# put together the full path to the ui file
filename = os.path.join(filename, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
Fix path for designer directory with ui filesimport os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
<commit_before>import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
ui_filename = os.path.split(ui_filename)[-1]
# directory containing this file
filename = __file__
if not os.path.isdir(filename):
filename = os.path.split(filename)[0]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(filename, '..', '..', 'designer')
# put together the full path to the ui file
filename = os.path.join(filename, ui_filename)
return loadUi(filename, baseinstance=baseinstance)
<commit_msg>Fix path for designer directory with ui files<commit_after>import os
from qtpy.uic import loadUi
def load_ui(ui_filename, baseinstance):
cwd = os.getcwd()
ui_filename = os.path.split(ui_filename)[-1]
# get the location of the designer directory
# this function assumes that all ui files are there
filename = os.path.join(cwd, 'designer', ui_filename)
return loadUi(filename, baseinstance=baseinstance)
|
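A short sketch of the new path resolution, with the .ui filename invented for illustration: the file is now looked up under a designer/ directory relative to the process working directory instead of relative to this module's parent packages. One consequence of this design is that the application must be launched from the directory that contains designer/.

import os

ui_filename = os.path.split('/anywhere/main_window.ui')[-1]  # hypothetical file
filename = os.path.join(os.getcwd(), 'designer', ui_filename)
print(filename)  # <current working directory>/designer/main_window.ui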
e2bee28d9b9f5034fe1c92b94e08fd73d2050b30
|
PythonClient/index.py
|
PythonClient/index.py
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat message', on_chat_message_response)
print('hello')
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat_message', on_chat_message_response)
print('hello')
|
Correct chat message event name
|
Correct chat message event name
|
Python
|
mit
|
yakovenkodenis/websockets_secure_chat,yakovenkodenis/websockets_secure_chat,yakovenkodenis/websockets_secure_chat,yakovenkodenis/websockets_secure_chat
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat message', on_chat_message_response)
print('hello')Correct chat message event name
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat_message', on_chat_message_response)
print('hello')
|
<commit_before># -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat message', on_chat_message_response)
print('hello')<commit_msg>Correct chat message event name<commit_after>
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat_message', on_chat_message_response)
print('hello')
|
# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat message', on_chat_message_response)
print('hello')Correct chat message event name# -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat_message', on_chat_message_response)
print('hello')
|
<commit_before># -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat message', on_chat_message_response)
print('hello')<commit_msg>Correct chat message event name<commit_after># -*- coding: utf-8 -*-
from socketIO_client import SocketIO, LoggingNamespace
def on_bbb_response(*args):
print('on_bbb_response', args)
def on_chat_message_response(*args):
print(args)
with SocketIO('http://192.168.0.110', 8080, LoggingNamespace) as socketIO:
socketIO.emit('hello', {'xxx': 'yyy'}, on_bbb_response)
# socketIO.wait_for_callbacks(seconds=1)
socketIO.on('chat_message', on_chat_message_response)
print('hello')
|
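A dictionary-based stand-in (not socketIO_client internals) for why the rename matters: handlers are keyed by the exact event-name string, so a client listening on 'chat message' never fires for a server emitting 'chat_message'.

handlers = {'chat_message': lambda *args: print('got', args)}
incoming = 'chat_message'  # the name the server actually emits
handlers.get(incoming, lambda *args: print('no handler fired'))('hello')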
631665a8aeee54d5094480ddf4140a61dce4a960
|
ostinato/blog/apps.py
|
ostinato/blog/apps.py
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
|
Correct app label of ostinato_blog
|
Correct app label of ostinato_blog
|
Python
|
mit
|
andrewebdev/django-ostinato,andrewebdev/django-ostinato,andrewebdev/django-ostinato
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
Correct app label of ostinato_blog
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
|
<commit_before>from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
<commit_msg>Correct app label of ostinato_blog<commit_after>
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
|
from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
Correct app label of ostinato_blogfrom django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
|
<commit_before>from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ost_blog'
verbose_name = 'Ostinato Blog Engine'
<commit_msg>Correct app label of ostinato_blog<commit_after>from django.apps import AppConfig
class OstinatoBlogConfig(AppConfig):
name = 'ostinato.blog'
label = 'ostinato_blog'
verbose_name = 'Ostinato Blog Engine'
|
7643635278fc1c92289e8fdd456614ce85a2c2f3
|
addons/osfstorage/models.py
|
addons/osfstorage/models.py
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
pass
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
# Required overrides
complete = True
has_auth = True
|
Add required overrides to osfstorage.NodeSettings
|
Add required overrides to osfstorage.NodeSettings
|
Python
|
apache-2.0
|
felliott/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,Nesiehr/osf.io,alexschiller/osf.io,aaxelb/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,mluo613/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,leb2dg/osf.io,cslzchen/osf.io,alexschiller/osf.io,hmoco/osf.io,mfraezz/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,brianjgeiger/osf.io,acshi/osf.io,acshi/osf.io,leb2dg/osf.io,Nesiehr/osf.io,erinspace/osf.io,erinspace/osf.io,chrisseto/osf.io,alexschiller/osf.io,binoculars/osf.io,icereval/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,Nesiehr/osf.io,crcresearch/osf.io,adlius/osf.io,adlius/osf.io,chrisseto/osf.io,sloria/osf.io,sloria/osf.io,chennan47/osf.io,laurenrevere/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,chennan47/osf.io,leb2dg/osf.io,icereval/osf.io,felliott/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,acshi/osf.io,erinspace/osf.io,baylee-d/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,mfraezz/osf.io,leb2dg/osf.io,baylee-d/osf.io,acshi/osf.io,aaxelb/osf.io,icereval/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,pattisdr/osf.io,hmoco/osf.io,caseyrollins/osf.io,chennan47/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,felliott/osf.io,Johnetordoff/osf.io,mluo613/osf.io,binoculars/osf.io,mluo613/osf.io,acshi/osf.io,adlius/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,aaxelb/osf.io,sloria/osf.io,cwisecarver/osf.io,mluo613/osf.io,adlius/osf.io,alexschiller/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,crcresearch/osf.io,mfraezz/osf.io,caneruguz/osf.io,hmoco/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,mattclark/osf.io,chrisseto/osf.io,crcresearch/osf.io,pattisdr/osf.io,binoculars/osf.io,pattisdr/osf.io,felliott/osf.io,aaxelb/osf.io,hmoco/osf.io
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
pass
Add required overrides to osfstorage.NodeSettings
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
# Required overrides
complete = True
has_auth = True
|
<commit_before>import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
pass
<commit_msg>Add required overrides to osfstorage.NodeSettings<commit_after>
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
# Required overrides
complete = True
has_auth = True
|
import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
pass
Add required overrides to osfstorage.NodeSettingsimport logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
# Required overrides
complete = True
has_auth = True
|
<commit_before>import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
pass
<commit_msg>Add required overrides to osfstorage.NodeSettings<commit_after>import logging
from addons.base.models import BaseNodeSettings, BaseStorageAddon
logger = logging.getLogger(__name__)
class NodeSettings(BaseStorageAddon, BaseNodeSettings):
# Required overrides
complete = True
has_auth = True
|
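A minimal sketch of what the override satisfies, with the base class below as a stand-in rather than the real BaseNodeSettings: the add-on framework reads complete and has_auth off every node-settings class, and osfstorage is always configured and authorized, so both are hard-coded to True.

class FakeBaseNodeSettings:  # stand-in for addons.base.models.BaseNodeSettings
    complete = NotImplemented
    has_auth = NotImplemented

class NodeSettings(FakeBaseNodeSettings):
    # Required overrides
    complete = True
    has_auth = True

print(NodeSettings.complete, NodeSettings.has_auth)  # True True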
3875b14e6c94c4a6a7ad47a3eb55cae62096d0e4
|
agateremote/table_remote.py
|
agateremote/table_remote.py
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, requests_encoding=None, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param requests_encoding:
An encoding to pass to requests for use when decoding the response
content. (e.g. force use of 'utf-8-sig' when CSV has a BOM).
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if requests_encoding:
r.encoding = requests_encoding
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
Add 'requests_encoding' parameter Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.
|
Add 'requests_encoding' parameter
Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.
|
Python
|
mit
|
wireservice/agate-remote
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
Add 'requests_encoding' parameter
Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, requests_encoding=None, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param requests_encoding:
An encoding to pass to requests for use when decoding the response
content. (e.g. force use of 'utf-8-sig' when CSV has a BOM).
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if requests_encoding:
r.encoding = requests_encoding
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
<commit_before>#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
<commit_msg>Add 'requests_encoding' parameter
Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.<commit_after>
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, requests_encoding=None, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param requests_encoding:
An encoding to pass to requests for use when decoding the response
content. (e.g. force use of 'utf-8-sig' when CSV has a BOM).
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if requests_encoding:
r.encoding = requests_encoding
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
Add 'requests_encoding' parameter
Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, requests_encoding=None, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param requests_encoding:
An encoding to pass to requests for use when decoding the response
content. (e.g. force use of 'utf-8-sig' when CSV has a BOM).
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if requests_encoding:
r.encoding = requests_encoding
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
<commit_before>#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
<commit_msg>Add 'requests_encoding' parameter
Allows user to override Requests' 'educated guess' about encoding of a response. Useful when loading a remote CSV that has a BOM that has been served with a 'text/csv' content-type, which Requests guesses needs a 'ISO-8859-1' encoding.<commit_after>#!/usr/bin/env python
"""
This module contains the Remote extension to :class:`Table <agate.table.Table>`.
"""
import agate
import requests
import six
def from_url(cls, url, callback=agate.Table.from_csv, requests_encoding=None, binary=False, **kwargs):
"""
Download a remote file and pass it to a :class:`.Table` parser.
:param url:
URL to a file to load.
:param callback:
The method to invoke to create the table. Typically either
:meth:`agate.Table.from_csv` or :meth:`agate.Table.from_json`, but
it could also be a method provided by an extension.
:param requests_encoding:
An encoding to pass to requests for use when decoding the response
content. (e.g. force use of 'utf-8-sig' when CSV has a BOM).
:param binary:
If :code:`False` the downloaded data will be processed as a string,
otherwise it will be treated as binary data. (e.g. for Excel files)
"""
r = requests.get(url)
if requests_encoding:
r.encoding = requests_encoding
if binary:
content = six.BytesIO(r.content)
else:
if six.PY2:
content = six.StringIO(r.content.decode('utf-8'))
else:
content = six.StringIO(r.text)
return callback(content, **kwargs)
agate.Table.from_url = classmethod(from_url)
|
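A standard-library demonstration of the motivating case, assuming a UTF-8 BOM on the payload: decoding with plain utf-8 leaves U+FEFF glued to the first CSV header, while utf-8-sig (the value a caller would now pass as requests_encoding) strips it.

raw = b'\xef\xbb\xbfid,name\n1,alice\n'  # hypothetical CSV bytes with a BOM
print(repr(raw.decode('utf-8')[:3]))      # '\ufeffid' -- BOM pollutes the header
print(repr(raw.decode('utf-8-sig')[:3]))  # 'id,'      -- BOM stripped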
9262dad14237d57a3817a199f9a8b04371de9607
|
mis_bot/scraper/database.py
|
mis_bot/scraper/database.py
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True,
connect_args= {'check_same_thread': False},
poolclass=StaticPool)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
Allow sharing db connection across threads
|
Allow sharing db connection across threads
|
Python
|
mit
|
ArionMiles/MIS-Bot
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)Allow sharing db connection across threads
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True,
connect_args= {'check_same_thread': False},
poolclass=StaticPool)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
<commit_before>from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)<commit_msg>Allow sharing db connection across threads<commit_after>
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True,
connect_args= {'check_same_thread': False},
poolclass=StaticPool)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)Allow sharing db connection across threadsfrom sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True,
connect_args= {'check_same_thread': False},
poolclass=StaticPool)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
<commit_before>from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)<commit_msg>Allow sharing db connection across threads<commit_after>from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.pool import StaticPool
# Database
engine = create_engine('sqlite:///files/chats.db', convert_unicode=True,
connect_args= {'check_same_thread': False},
poolclass=StaticPool)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
# import all modules here that might define models so that
# they will be registered properly on the metadata. Otherwise
# you will have to import them first before calling init_db()
import scraper.models
Base.metadata.create_all(bind=engine)
|
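A minimal sketch of why the combination in this commit works (assuming SQLAlchemy is installed; the in-memory database and worker loop are illustrative, not from the repository): check_same_thread=False lifts sqlite3's same-thread guard, and StaticPool makes every checkout hand back the same single connection, so all threads share one handle.

# Hypothetical demonstration, not part of the repository:
import threading
from sqlalchemy import create_engine, text
from sqlalchemy.pool import StaticPool

engine = create_engine(
    'sqlite://',                                # in-memory database
    connect_args={'check_same_thread': False},  # allow use from any thread
    poolclass=StaticPool,                       # one shared connection for all checkouts
)

def worker(n):
    # Every thread checks out the same underlying sqlite3 connection.
    with engine.connect() as conn:
        conn.execute(text('SELECT :n'), {'n': n})

threads = [threading.Thread(target=worker, args=(i,)) for i in range(4)]
for t in threads:
    t.start()
for t in threads:
    t.join()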
a04116d32931c5e85de417b5da048c91d495261b
|
pyeventstore/client.py
|
pyeventstore/client.py
|
import asyncio
import uuid
import json
import requests
from requests.exceptions import HTTPError
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
from pyeventstore.stream_page import StreamPage
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
def get_projection(self, projection_name):
uri = self.uri_base + '/projection/{}'.format(projection_name)
headers = {'Accept': 'application/json'}
response = requests.get(uri, headers=headers)
return response.json()
def get_projection_state(self, projection_name, partition=None):
uri = self.uri_base + '/projection/{}/state'.format(projection_name)
headers = {'Accept': 'application/json'}
params = {}
if partition:
params['partition'] = partition
response = requests.get(uri, headers=headers, params=params)
return response.json()
|
import asyncio
import uuid
import json
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
|
Remove projections methods for now
|
Remove projections methods for now
|
Python
|
mit
|
cjlarose/pyeventstore
|
import asyncio
import uuid
import json
import requests
from requests.exceptions import HTTPError
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
from pyeventstore.stream_page import StreamPage
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
def get_projection(self, projection_name):
uri = self.uri_base + '/projection/{}'.format(projection_name)
headers = {'Accept': 'application/json'}
response = requests.get(uri, headers=headers)
return response.json()
def get_projection_state(self, projection_name, partition=None):
uri = self.uri_base + '/projection/{}/state'.format(projection_name)
headers = {'Accept': 'application/json'}
params = {}
if partition:
params['partition'] = partition
response = requests.get(uri, headers=headers, params=params)
return response.json()
Remove projections methods for now
|
import asyncio
import uuid
import json
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
|
<commit_before>import asyncio
import uuid
import json
import requests
from requests.exceptions import HTTPError
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
from pyeventstore.stream_page import StreamPage
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
def get_projection(self, projection_name):
uri = self.uri_base + '/projection/{}'.format(projection_name)
headers = {'Accept': 'application/json'}
response = requests.get(uri, headers=headers)
return response.json()
def get_projection_state(self, projection_name, partition=None):
uri = self.uri_base + '/projection/{}/state'.format(projection_name)
headers = {'Accept': 'application/json'}
params = {}
if partition:
params['partition'] = partition
response = requests.get(uri, headers=headers, params=params)
return response.json()
<commit_msg>Remove projections methods for now<commit_after>
|
import asyncio
import uuid
import json
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
|
import asyncio
import uuid
import json
import requests
from requests.exceptions import HTTPError
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
from pyeventstore.stream_page import StreamPage
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
def get_projection(self, projection_name):
uri = self.uri_base + '/projection/{}'.format(projection_name)
headers = {'Accept': 'application/json'}
response = requests.get(uri, headers=headers)
return response.json()
def get_projection_state(self, projection_name, partition=None):
uri = self.uri_base + '/projection/{}/state'.format(projection_name)
headers = {'Accept': 'application/json'}
params = {}
if partition:
params['partition'] = partition
response = requests.get(uri, headers=headers, params=params)
return response.json()
Remove projections methods for nowimport asyncio
import uuid
import json
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
|
<commit_before>import asyncio
import uuid
import json
import requests
from requests.exceptions import HTTPError
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
from pyeventstore.stream_page import StreamPage
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
def get_projection(self, projection_name):
uri = self.uri_base + '/projection/{}'.format(projection_name)
headers = {'Accept': 'application/json'}
response = requests.get(uri, headers=headers)
return response.json()
def get_projection_state(self, projection_name, partition=None):
uri = self.uri_base + '/projection/{}/state'.format(projection_name)
headers = {'Accept': 'application/json'}
params = {}
if partition:
params['partition'] = partition
response = requests.get(uri, headers=headers, params=params)
return response.json()
<commit_msg>Remove projections methods for now<commit_after>import asyncio
import uuid
import json
from pyeventstore.events import (get_all_events,
start_subscription,
publish_events)
class Client:
def __init__(self, host, secure=False, port=2113):
proto = "https" if secure else "http"
self.uri_base = '{}://{}:{}'.format(proto, host, port)
@asyncio.coroutine
def publish_events(self, stream_name, events):
uri = self.stream_head_uri(stream_name)
yield from publish_events(uri, events)
def stream_head_uri(self, stream_name):
return '{}/streams/{}'.format(self.uri_base, stream_name)
@asyncio.coroutine
def get_all_events(self, stream_name):
head_uri = self.stream_head_uri(stream_name)
return (yield from get_all_events(head_uri))
@asyncio.coroutine
def subscribe(self, stream_name, interval_seconds=1):
head_uri = self.stream_head_uri(stream_name)
return (yield from start_subscription(head_uri, interval_seconds))
|
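If the projection lookups are ever needed again, they could live as free functions outside the Client class rather than coming back as methods. A hedged sketch under that assumption — the function name is invented here; the endpoint path and headers are copied from the removed methods, and requests is assumed to be available:

# Hypothetical free function reproducing the removed behaviour:
import requests

def get_projection_state(uri_base, projection_name, partition=None):
    uri = uri_base + '/projection/{}/state'.format(projection_name)
    params = {'partition': partition} if partition else {}
    response = requests.get(uri, headers={'Accept': 'application/json'},
                            params=params)
    response.raise_for_status()  # surface HTTP errors instead of decoding garbage
    return response.json()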
5a3ffb93131c83f81eb123c2969714dcc80513ca
|
django/crashreport/processor/signals.py
|
django/crashreport/processor/signals.py
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(*args, **kwargs):
minproc = MinidumpProcessor()
minproc.process(kwargs['crash_id'])
logger.info('processed: %s' % (kwargs['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(env):
minproc = MinidumpProcessor()
minproc.process(env['crash_id'])
logger.info('processed: %s' % (env['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
Revert "make the uwsgi spooler code also work with the fallback"
|
Revert "make the uwsgi spooler code also work with the fallback"
This reverts commit 84ab847b444fbd41b9cc17e5c79a609efdcdf6cf.
|
Python
|
mpl-2.0
|
mmohrhard/crash,mmohrhard/crash,Liongold/crash,Liongold/crash,mmohrhard/crash,Liongold/crash
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(*args, **kwargs):
minproc = MinidumpProcessor()
minproc.process(kwargs['crash_id'])
logger.info('processed: %s' % (kwargs['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
Revert "make the uwsgi spooler code also work with the fallback"
This reverts commit 84ab847b444fbd41b9cc17e5c79a609efdcdf6cf.
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(env):
minproc = MinidumpProcessor()
minproc.process(env['crash_id'])
logger.info('processed: %s' % (env['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
<commit_before># -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(*args, **kwargs):
minproc = MinidumpProcessor()
minproc.process(kwargs['crash_id'])
logger.info('processed: %s' % (kwargs['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
<commit_msg>Revert "make the uwsgi spooler code also work with the fallback"
This reverts commit 84ab847b444fbd41b9cc17e5c79a609efdcdf6cf.<commit_after>
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(env):
minproc = MinidumpProcessor()
minproc.process(env['crash_id'])
logger.info('processed: %s' % (env['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(*args, **kwargs):
minproc = MinidumpProcessor()
minproc.process(kwargs['crash_id'])
logger.info('processed: %s' % (kwargs['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
Revert "make the uwsgi spooler code also work with the fallback"
This reverts commit 84ab847b444fbd41b9cc17e5c79a609efdcdf6cf.# -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(env):
minproc = MinidumpProcessor()
minproc.process(env['crash_id'])
logger.info('processed: %s' % (env['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
<commit_before># -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(*args, **kwargs):
minproc = MinidumpProcessor()
minproc.process(kwargs['crash_id'])
logger.info('processed: %s' % (kwargs['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
<commit_msg>Revert "make the uwsgi spooler code also work with the fallback"
This reverts commit 84ab847b444fbd41b9cc17e5c79a609efdcdf6cf.<commit_after># -*- Mode: python; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
from .processor import MinidumpProcessor
from uwsgidecoratorsfallback import spool
import logging
logger = logging.getLogger(__name__)
@spool
def do_process_uploaded_crash(env):
minproc = MinidumpProcessor()
minproc.process(env['crash_id'])
logger.info('processed: %s' % (env['crash_id']))
# vim:set shiftwidth=4 softtabstop=4 expandtab: */
|
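The revert restores uWSGI's native spooler calling convention, where the task receives a single env dict, instead of the *args/**kwargs shape the fallback variant used. A speculative sketch of a minimal fallback decorator compatible with that env-dict convention (this is not the project's uwsgidecoratorsfallback; the .spool attribute is an assumption about how callers enqueue tasks):

# Hypothetical minimal fallback: run the task immediately, calling it with a
# single env dict exactly as the real uWSGI spooler does.
def spool(func):
    def enqueue(env):
        func(env)            # same signature as uWSGI: one dict argument
        return 'done'
    enqueue.spool = enqueue  # attribute assumed for callers using task.spool(...)
    return enqueue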
98dafbc7578209b9768e6ca6ccfa7854f70deb16
|
runTwircBot.py
|
runTwircBot.py
|
from TwircBot import TwircBot as tw
bot = tw("config/sampleConfig.sample")
bot.print_config()
bot.connect()
|
from TwircBot import TwircBot as tw
import sys
bot = tw(sys.argv[1])
bot.print_config()
bot.connect()
|
Modify runTwircBot.py to accept system arguments

|
Modify runTwircBot.py to accept system arguments
|
Python
|
mit
|
johnmarcampbell/twircBot
|
from TwircBot import TwircBot as tw
bot = tw("config/sampleConfig.sample")
bot.print_config()
bot.connect()
Modify runTwircBot.py to accept system arguments
|
from TwircBot import TwircBot as tw
import sys
bot = tw(sys.argv[1])
bot.print_config()
bot.connect()
|
<commit_before>from TwircBot import TwircBot as tw
bot = tw("config/sampleConfig.sample")
bot.print_config()
bot.connect()
<commit_msg>Modify runTwircBot.py to accept system arguments<commit_after>
|
from TwircBot import TwircBot as tw
import sys
bot = tw(sys.argv[1])
bot.print_config()
bot.connect()
|
from TwircBot import TwircBot as tw
bot = tw("config/sampleConfig.sample")
bot.print_config()
bot.connect()
Modify runTwircBot.py to accept system argumentsfrom TwircBot import TwircBot as tw
import sys
bot = tw(sys.argv[1])
bot.print_config()
bot.connect()
|
<commit_before>from TwircBot import TwircBot as tw
bot = tw("config/sampleConfig.sample")
bot.print_config()
bot.connect()
<commit_msg>Modify runTwirc.py to accept system arguments<commit_after>from TwircBot import TwircBot as tw
import sys
bot = tw(sys.argv[1])
bot.print_config()
bot.connect()
|
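Raw sys.argv[1] works but raises an IndexError when no path is given. A hedged alternative sketch using argparse from the standard library (this is not what the commit adds; it only illustrates a friendlier failure mode):

# Hypothetical variant with a usage message instead of an IndexError:
import argparse
from TwircBot import TwircBot as tw

parser = argparse.ArgumentParser(description='Run TwircBot with a config file.')
parser.add_argument('config', help='path to the configuration file')
args = parser.parse_args()

bot = tw(args.config)
bot.print_config()
bot.connect()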
f868b126b3bd81ec900f378ff1fa8bd29ab8ea4c
|
transformations/Transformations.py
|
transformations/Transformations.py
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = []
transformations.append(ButterFingersPerturbation())
transformations.append(WithoutPunctuation())
transformations.append(ChangeNamedEntities())
transformations.append(BackTranslation())
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = [ButterFingersPerturbation(), WithoutPunctuation(), ChangeNamedEntities(), BackTranslation()]
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
Add interface for source+label perturbation
|
Add interface for source+label perturbation
|
Python
|
mit
|
GEM-benchmark/NL-Augmenter
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = []
transformations.append(ButterFingersPerturbation())
transformations.append(WithoutPunctuation())
transformations.append(ChangeNamedEntities())
transformations.append(BackTranslation())
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
Add interface for source+label perturbation
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = [ButterFingersPerturbation(), WithoutPunctuation(), ChangeNamedEntities(), BackTranslation()]
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
<commit_before>from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = []
transformations.append(ButterFingersPerturbation())
transformations.append(WithoutPunctuation())
transformations.append(ChangeNamedEntities())
transformations.append(BackTranslation())
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
<commit_msg>Add interface for source+label perturbation<commit_after>
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = [ButterFingersPerturbation(), WithoutPunctuation(), ChangeNamedEntities(), BackTranslation()]
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = []
transformations.append(ButterFingersPerturbation())
transformations.append(WithoutPunctuation())
transformations.append(ChangeNamedEntities())
transformations.append(BackTranslation())
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
Add interface for source+label perturbation
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = [ButterFingersPerturbation(), WithoutPunctuation(), ChangeNamedEntities(), BackTranslation()]
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
<commit_before>from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = []
transformations.append(ButterFingersPerturbation())
transformations.append(WithoutPunctuation())
transformations.append(ChangeNamedEntities())
transformations.append(BackTranslation())
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
<commit_msg>Add interface for source+label perturbation<commit_after>
from transformations.BackTranslation import BackTranslation
from transformations.ButterFingersPerturbation import ButterFingersPerturbation
from transformations.ChangeNamedEntities import ChangeNamedEntities
from transformations.SentenceTransformation import SentenceTransformation
from transformations.WithoutPunctuation import WithoutPunctuation
class TransformationsList(SentenceTransformation):
def __init__(self):
transformations = [ButterFingersPerturbation(), WithoutPunctuation(), ChangeNamedEntities(), BackTranslation()]
self.transformations = transformations
def generate(self, sentence: str):
print(f"Original Input : {sentence}")
generations = {"Original": sentence}
for transformation in self.transformations:
generations[transformation.name()] = transformation.generate(sentence)
return generations
|
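The subject mentions an interface for source+label perturbation, which the diff itself does not show. A speculative sketch of what such an interface could look like — the class and method names below are invented for illustration, not taken from the repository:

# Invented illustration only -- not the interface the repository added:
class SentenceAndLabelTransformation:
    def name(self):
        return self.__class__.__name__

    def generate(self, sentence: str, label: str):
        # Return the perturbed sentence together with its (possibly updated) label.
        raise NotImplementedError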
44226dabb65cb06522c128539660e407e53ca602
|
parseHTML.py
|
parseHTML.py
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
file = open('test/data.csv', 'w+')
file.write(num + ",")
|
Write information from test.html to csv file
|
Write information from test.html to csv file
|
Python
|
mit
|
tfahl/printfo,tfahl/printfo
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
Write information from test.html to csv file
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
file = open('test/data.csv', 'w+')
file.write(num + ",")
|
<commit_before>from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
<commit_msg>Write information from test.html to csv file<commit_after>
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
file = open('test/data.csv', 'w+')
file.write(num + ",")
|
from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
Write information from test.html to csv filefrom bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
file = open('test/data.csv', 'w+')
file.write(num + ",")
|
<commit_before>from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
<commit_msg>Write information from test.html to csv file<commit_after>from bs4 import BeautifulSoup
#uses beautiful soup to parse html file
#finds the correct span tag
#Gets the percentage of ink left in the printer
soup = BeautifulSoup(open("test/test.html"))
res = soup.find('span',{'class':'hpConsumableBlockHeaderText'}).text
num = res[24] + res[25]
file = open('test/data.csv', 'w+')
file.write(num + ",")
|
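res[24] + res[25] assumes the percentage always sits at a fixed offset and is exactly two digits. A hedged sketch of a more robust extraction with a regular expression, plus the csv module instead of hand-written comma writing; file names are reused from the script, and the regex encodes an assumption that the span's text contains a number followed by a percent sign:

# Hypothetical hardening of the same steps:
import csv
import re
from bs4 import BeautifulSoup

with open('test/test.html') as html_file:
    soup = BeautifulSoup(html_file, 'html.parser')
res = soup.find('span', {'class': 'hpConsumableBlockHeaderText'}).text

match = re.search(r'(\d+)\s*%', res)  # find the percentage wherever it sits
num = match.group(1) if match else ''

with open('test/data.csv', 'a', newline='') as out:
    csv.writer(out).writerow([num])   # append one row per reading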
f820ef6cef8037942d18dcc912fb6de093ecc8de
|
txircd/modules/rfc/cmd_userhost.py
|
txircd/modules/rfc/cmd_userhost.py
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if targetUser.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
Check away status of the target, not user, of USERHOST
|
Check away status of the target, not user, of USERHOST
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()Check away status of the target, not user, of USERHOST
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if targetUser.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
<commit_before>from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()<commit_msg>Check away status of the target, not user, of USERHOST<commit_after>
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if targetUser.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()Check away status of the target, not user, of USERHOSTfrom twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if targetUser.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
<commit_before>from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()<commit_msg>Check away status of the target, not user, of USERHOST<commit_after>from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if targetUser.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
|
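The reply entries built here follow the RPL_USERHOST shape from RFC 2812: nickname, an optional '*' for operators, '=', then '+' (here) or '-' (away) before user@host. A standalone sketch of just that formatting, detached from txircd's user objects and module interface:

# Illustrative formatter, independent of txircd:
def format_userhost_entry(nick, ident, host, is_oper, is_away):
    return '{}{}={}{}@{}'.format(
        nick,
        '*' if is_oper else '',   # trailing '*' marks an IRC operator
        '-' if is_away else '+',  # '-' means away, '+' means here
        ident,
        host,
    )

# format_userhost_entry('alice', 'alice', 'example.org', True, False)
# -> 'alice*=+alice@example.org'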
36fe90ca170525cabc4f2a496a12a28c86b7e82d
|
uchicagohvz/production_settings.py
|
uchicagohvz/production_settings.py
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
settings.DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
Set DEBUG = False in production
|
Set DEBUG = False in production
|
Python
|
mit
|
kz26/uchicago-hvz,kz26/uchicago-hvz,kz26/uchicago-hvz
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = TrueSet DEBUG = False in production
|
from local_settings import *
settings.DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Set DEBUG = False in production<commit_after>
|
from local_settings import *
settings.DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = TrueSet DEBUG = False in productionfrom local_settings import *
settings.DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
<commit_before>from local_settings import *
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True<commit_msg>Set DEBUG = False in production<commit_after>from local_settings import *
settings.DEBUG = False
ALLOWED_HOSTS = ['uchicagohvz.org']
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'uchicagohvz', # Or path to database file if using sqlite3.
'USER': 'user', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# REST framework settings
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
)
}
# Mandrill email settings
EMAIL_HOST = 'smtp.mandrillapp.com'
from secrets import EMAIL_HOST_USER, EMAIL_HOST_PASSWORD
EMAIL_PORT = '587'
EMAIL_USE_TLS = True
|
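One detail worth flagging: the added line assigns to settings.DEBUG, but this module never imports a settings name (unless local_settings happens to export one through the star import), so the conventional module-level override is a plain assignment. A hedged sketch of that override; the environment-variable gate is a common pattern assumed here, not something taken from this repository:

# Illustrative module-level override, not this repository's code:
import os

DEBUG = os.environ.get('DJANGO_DEBUG', '') == '1'  # stays False in production unless explicitly enabled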
2b603ebe92e308aa78928772e8681f3cc46775cb
|
numba/cloudpickle/compat.py
|
numba/cloudpickle/compat.py
|
import sys
if sys.version_info < (3, 8):
try:
import pickle5 as pickle # noqa: F401
from pickle5 import Pickler # noqa: F401
except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
import sys
if sys.version_info < (3, 8):
# NOTE: pickle5 is disabled due to problems in testing.
# try:
# import pickle5 as pickle # noqa: F401
# from pickle5 import Pickler # noqa: F401
# except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
Disable pickle5 use in cloudpickle
|
Disable pickle5 use in cloudpickle
|
Python
|
bsd-2-clause
|
numba/numba,IntelLabs/numba,stuartarchibald/numba,stonebig/numba,seibert/numba,cpcloud/numba,stonebig/numba,cpcloud/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,cpcloud/numba,seibert/numba,cpcloud/numba,stonebig/numba,stonebig/numba,stuartarchibald/numba,seibert/numba,stuartarchibald/numba,stuartarchibald/numba,stonebig/numba,seibert/numba,IntelLabs/numba,numba/numba,numba/numba,IntelLabs/numba
|
import sys
if sys.version_info < (3, 8):
try:
import pickle5 as pickle # noqa: F401
from pickle5 import Pickler # noqa: F401
except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
Disable pickle5 use in cloudpickle
|
import sys
if sys.version_info < (3, 8):
# NOTE: pickle5 is disabled due to problems in testing.
# try:
# import pickle5 as pickle # noqa: F401
# from pickle5 import Pickler # noqa: F401
# except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
<commit_before>import sys
if sys.version_info < (3, 8):
try:
import pickle5 as pickle # noqa: F401
from pickle5 import Pickler # noqa: F401
except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
<commit_msg>Disable pickle5 use in cloudpickle<commit_after>
|
import sys
if sys.version_info < (3, 8):
# NOTE: pickle5 is disabled due to problems in testing.
# try:
# import pickle5 as pickle # noqa: F401
# from pickle5 import Pickler # noqa: F401
# except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
import sys
if sys.version_info < (3, 8):
try:
import pickle5 as pickle # noqa: F401
from pickle5 import Pickler # noqa: F401
except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
Disable pickle5 use in cloudpickleimport sys
if sys.version_info < (3, 8):
# NOTE: pickle5 is disabled due to problems in testing.
# try:
# import pickle5 as pickle # noqa: F401
# from pickle5 import Pickler # noqa: F401
# except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
<commit_before>import sys
if sys.version_info < (3, 8):
try:
import pickle5 as pickle # noqa: F401
from pickle5 import Pickler # noqa: F401
except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
<commit_msg>Disable pickle5 use in cloudpickle<commit_after>import sys
if sys.version_info < (3, 8):
# NOTE: pickle5 is disabled due to problems in testing.
# try:
# import pickle5 as pickle # noqa: F401
# from pickle5 import Pickler # noqa: F401
# except ImportError:
import pickle # noqa: F401
from pickle import _Pickler as Pickler # noqa: F401
else:
import pickle # noqa: F401
from _pickle import Pickler # noqa: F401
|
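For context on the record above: pickle protocol 5 (out-of-band buffers) ships with Python 3.8+, and the pickle5 backport supplies it on older interpreters; the commit disables that fallback chain. A small self-contained sketch of negotiating the best available protocol instead of gating on the import:

import pickle

# Older interpreters top out below protocol 5 unless the backport is present,
# so clamp to whatever the running interpreter supports.
best = min(5, pickle.HIGHEST_PROTOCOL)
data = {"payload": list(range(5))}
blob = pickle.dumps(data, protocol=best)
assert pickle.loads(blob) == data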
7e166ba983bd1470f6eac6776107a64539c38581
|
animal_spharm/test/test_animal_spharm.py
|
animal_spharm/test/test_animal_spharm.py
|
import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
|
Copy over tests from aospy-obj-lib
|
Copy over tests from aospy-obj-lib
|
Python
|
apache-2.0
|
spencerahill/animal-spharm
|
Copy over tests from aospy-obj-lib
|
import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
|
<commit_before><commit_msg>Copy over tests from aospy-obj-lib<commit_after>
|
import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
|
Copy over tests from aospy-obj-libimport numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
|
<commit_before><commit_msg>Copy over tests from aospy-obj-lib<commit_after>import numpy as np
import pytest
import xray
from aospy_user import SpharmInterface
@pytest.fixture
def compute_vrtdiv(u, v):
sphint = SpharmInterface(u, v)
sphint.make_vectorwind()
sphint.make_spharmt()
vort, divg = sphint.vectorwind.vrtdiv()
return sphint.to_xray(vort), sphint.to_xray(divg)
def test_vrtdiv():
path = ('/archive/Spencer.Hill/am2/am2clim_reyoi/gfdl.ncrc2-default-prod/'
'pp/atmos_level/ts/monthly/1yr/atmos_level.198301-198312.')
# Vertically defined, sigma levels.
u_arr = xray.open_dataset(path + 'ucomp.nc').ucomp
v_arr = xray.open_dataset(path + 'vcomp.nc').vcomp
vort, divg = compute_vrtdiv(u_arr, v_arr)
assert vort.shape == u_arr.shape
assert divg.shape == u_arr.shape
np.testing.assert_array_equal(u_arr.lat, vort.lat)
np.testing.assert_array_equal(u_arr.lon, vort.lon)
np.testing.assert_array_equal(u_arr.time, vort.time)
np.testing.assert_array_equal(u_arr.pfull, vort.pfull)
# Not vertically defined.
u0 = u_arr[:,0]
v0 = v_arr[:,0]
vort0, divg0 = compute_vrtdiv(u0, v0)
assert vort0.shape == u0.shape
assert divg0.shape == u0.shape
# Dummy case: zeros everywhere
u_arr_zeros = xray.DataArray(np.zeros_like(u_arr.values), dims=u_arr.dims,
coords=u_arr.coords)
v_arr_zeros = u_arr_zeros.copy()
vort_zeros, divg_zeros = compute_vrtdiv(u_arr_zeros, v_arr_zeros)
assert not vort_zeros.any()
assert not divg_zeros.any()
|
|
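The "zeros everywhere" case in the record above relies on building a DataArray of zeros that shares a template's dims and coords. A hedged sketch of that construction, written against the modern xarray name (the record imports the older xray package); the dims and values here are made up:

import numpy as np
import xarray as xr  # successor of the xray package used in the record

# A field of zeros shaped like a template; any vorticity/divergence derived
# from it should be identically zero, which is what the test's .any() checks.
template = xr.DataArray(
    np.random.rand(4, 8),
    dims=("lat", "lon"),
    coords={"lat": np.linspace(-60.0, 60.0, 4), "lon": np.linspace(0.0, 315.0, 8)},
)
u_zeros = xr.zeros_like(template)
assert not u_zeros.values.any()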
bbd3b1939712d9784fe61884d9b06faa95c36006
|
tests/test_project/test_app/models.py
|
tests/test_project/test_app/models.py
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image')
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image', upload_to='uploads/')
|
Test compatibility with older Django versions.
|
Test compatibility with older Django versions.
|
Python
|
mit
|
dessibelle/sorl-thumbnail-serializer-field
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image')
Test compatibility with older Django versions.
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image', upload_to='uploads/')
|
<commit_before>from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image')
<commit_msg>Test compatibility with older Django versions.<commit_after>
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image', upload_to='uploads/')
|
from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image')
Test compatibility with older Django versions.from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image', upload_to='uploads/')
|
<commit_before>from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image')
<commit_msg>Test compatibility with older Django versions.<commit_after>from django.db import models
class TestModel(models.Model):
name = models.CharField(max_length=63, unique=True, verbose_name='Name')
image = models.ImageField(verbose_name='Image', upload_to='uploads/')
|
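The fix in the record above works because older Django releases treated upload_to as a required argument of ImageField/FileField, so declaring it keeps the model valid across versions. A minimal sketch with a throwaway model name (not part of the record):

from django.db import models

class Snapshot(models.Model):  # hypothetical model for illustration
    # Supplying upload_to keeps the declaration valid on old Django releases
    # that required it; uploaded files land under MEDIA_ROOT/uploads/.
    image = models.ImageField(upload_to='uploads/')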
59ce3ca9c1572dcf71aa5de5cdb354def594a36c
|
downloads/urls.py
|
downloads/urls.py
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', partial(download_protected_file,
path_prefix='solutions/',
model_class=UserSolution),
name='download_solution'),
)
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
Remove unnecessary usage of functools.partial
|
downloads: Remove unnecessary usage of functools.partial
|
Python
|
mit
|
matus-stehlik/roots,rtrembecky/roots,matus-stehlik/roots,tbabej/roots,rtrembecky/roots,tbabej/roots,tbabej/roots,rtrembecky/roots,matus-stehlik/roots
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', partial(download_protected_file,
path_prefix='solutions/',
model_class=UserSolution),
name='download_solution'),
)
downloads: Remove unnecessary usage of functools.partial
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
<commit_before>from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', partial(download_protected_file,
path_prefix='solutions/',
model_class=UserSolution),
name='download_solution'),
)
<commit_msg>downloads: Remove unnecessary usage of functools.partial<commit_after>
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', partial(download_protected_file,
path_prefix='solutions/',
model_class=UserSolution),
name='download_solution'),
)
downloads: Remove unnecessary usage of functools.partialfrom django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
<commit_before>from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', partial(download_protected_file,
path_prefix='solutions/',
model_class=UserSolution),
name='download_solution'),
)
<commit_msg>downloads: Remove unnecessary usage of functools.partial<commit_after>from django.conf.urls import patterns, url
from functools import partial
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(r'solutions/(?P<path>.*)$', download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
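The mechanism the record above leans on: Django's URL dispatcher accepts an optional dict of extra keyword arguments and passes them to the view on every request, which makes functools.partial unnecessary. A hedged sketch with a stand-in view (url() fits the era of the record; it was removed in Django 4.0):

from django.conf.urls import url
from django.http import HttpResponse

def download(request, path, path_prefix='', model_class=None):
    # Stand-in view: the dict in the urlpattern arrives here as kwargs.
    return HttpResponse(path_prefix + path)

urlpatterns = [
    url(r'^solutions/(?P<path>.*)$', download,
        {'path_prefix': 'solutions/'},   # extra kwargs, replacing partial()
        name='download_solution'),
]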
8cd2c9a4b9cf69a97e758d1e6416d2efbbf7028a
|
misc/python/botan/__init__.py
|
misc/python/botan/__init__.py
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj();
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
Remove a semicolon that didn't need to be there
|
Remove a semicolon that didn't need to be there
|
Python
|
bsd-2-clause
|
randombit/botan,randombit/botan,Rohde-Schwarz-Cybersecurity/botan,randombit/botan,Rohde-Schwarz-Cybersecurity/botan,Rohde-Schwarz-Cybersecurity/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,webmaster128/botan,randombit/botan,webmaster128/botan,webmaster128/botan,webmaster128/botan,Rohde-Schwarz-Cybersecurity/botan,randombit/botan
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj();
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
Remove a semicolon that didn't need to be there
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
<commit_before>from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj();
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
<commit_msg>Remove a semicolon that didn't need to be there<commit_after>
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj();
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
Remove a semicolon that didn't need to be therefrom _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
<commit_before>from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj();
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
<commit_msg>Remove a semicolon that didn't need to be there<commit_after>from _botan import *
init = LibraryInitializer()
def Filter(name, key = None, iv = None, dir = None):
if key != None and iv != None and dir != None:
return make_filter(name, key, iv, dir)
elif key != None and dir != None:
return make_filter(name, key, dir)
elif key != None:
return make_filter(name, key)
else:
return make_filter(name)
def Pipe(*filters):
pipe = PipeObj()
for filter in filters:
if filter:
pipe.append(filter)
return pipe
#def Filter(name, key):
# return make_filter(name, key)
|
6cb9008ee2ed49d9630735378bd84727aef3caef
|
dipy/core/tests/test_qball.py
|
dipy/core/tests/test_qball.py
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
TEST - some real_sph_harm tests
|
TEST - some real_sph_harm tests
|
Python
|
bsd-3-clause
|
villalonreina/dipy,samuelstjean/dipy,jyeatman/dipy,sinkpoint/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,villalonreina/dipy,nilgoyyou/dipy,beni55/dipy,demianw/dipy,FrancoisRheaultUS/dipy,rfdougherty/dipy,JohnGriffiths/dipy,mdesco/dipy,Messaoud-Boudjada/dipy,JohnGriffiths/dipy,samuelstjean/dipy,samuelstjean/dipy,StongeEtienne/dipy,oesteban/dipy,maurozucchelli/dipy,matthieudumont/dipy,matthieudumont/dipy,nilgoyyou/dipy,oesteban/dipy,rfdougherty/dipy,beni55/dipy,FrancoisRheaultUS/dipy,jyeatman/dipy,maurozucchelli/dipy,sinkpoint/dipy,StongeEtienne/dipy,demianw/dipy
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
TEST - some real_sph_harm tests
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
<commit_before>""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
<commit_msg>TEST - some real_sph_harm tests<commit_after>
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
TEST - some real_sph_harm tests""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
<commit_before>""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
yield assert_false(True)
<commit_msg>TEST - some real_sph_harm tests<commit_after>""" Testing qball
"""
import numpy as np
import dipy.core.qball as qball
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
@parametric
def test_sph_harm_ind_list():
m_list, n_list = qball.sph_harm_ind_list(8)
yield assert_equal(m_list.shape, n_list.shape)
yield assert_equal(m_list.ndim, 2)
yield assert_equal(m_list.shape, (45,1))
yield assert_true(np.all(np.abs(m_list) <= n_list))
yield assert_array_equal(n_list % 2, 0)
yield assert_raises(ValueError, qball.sph_harm_ind_list, 1)
@parametric
def test_real_sph_harm():
real_sh = qball.real_sph_harm(0, 0, 0, 0)
yield assert_true(True)
|
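The (45, 1) shape asserted in the record above is just the coefficient count of an even-order spherical-harmonic basis up to order 8: each order n contributes 2n + 1 values of m, and 1 + 5 + 9 + 13 + 17 = 45. A quick check:

# Even orders n = 0, 2, 4, 6, 8; each contributes 2n + 1 values of m.
n_coeffs = sum(2 * n + 1 for n in range(0, 9, 2))
assert n_coeffs == 45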
46ab8d71824f80ba5d02349a9f89328e5c47f434
|
app/views.py
|
app/views.py
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": ipAddress
})
return app.response_class(response=response, mimetype='application/json')
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import re
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": re.sub(r",.+$", "", ipAddress)
})
return app.response_class(response=response, mimetype='application/json')
|
Handle instance where we get multiple IP addresses in the response
|
Handle instance where we get multiple IP addresses in the response
|
Python
|
mit
|
taeram/gipsy
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": ipAddress
})
return app.response_class(response=response, mimetype='application/json')
Handle instance where we get multiple IP addresses in the response
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import re
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": re.sub(r",.+$", "", ipAddress)
})
return app.response_class(response=response, mimetype='application/json')
|
<commit_before>from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": ipAddress
})
return app.response_class(response=response, mimetype='application/json')
<commit_msg>Handle instance where we get multiple IP addresses in the response<commit_after>
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import re
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": re.sub(r",.+$", "", ipAddress)
})
return app.response_class(response=response, mimetype='application/json')
|
from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": ipAddress
})
return app.response_class(response=response, mimetype='application/json')
Handle instance where we get multiple IP addresses in the responsefrom app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import re
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": re.sub(r",.+$", "", ipAddress)
})
return app.response_class(response=response, mimetype='application/json')
|
<commit_before>from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": ipAddress
})
return app.response_class(response=response, mimetype='application/json')
<commit_msg>Handle instance where we get multiple IP addresess in the response<commit_after>from app import app, \
cors_header
from flask import request, \
make_response, \
send_from_directory
import json
import re
import os
@app.route('/', methods=['GET'])
@cors_header
def index():
if 'X-Forwarded-For' in request.headers:
ipAddress = request.headers['X-Forwarded-For']
else:
ipAddress = request.remote_addr
response = json.dumps({
"address": re.sub(r",.+$", "", ipAddress)
})
return app.response_class(response=response, mimetype='application/json')
|
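Background for the record above: X-Forwarded-For can carry a comma-separated chain (client, proxy1, proxy2, ...), and the regex keeps everything before the first comma, i.e. the original client. The same result without a regex, using documentation-range addresses as the example:

header = "203.0.113.7, 70.41.3.18, 150.172.238.178"  # example proxy chain
client_ip = header.split(",")[0].strip()             # first entry = client
assert client_ip == "203.0.113.7"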
3439eb09916212cd71650aecc49ae1c22f650274
|
apps/package/templatetags/package_tags.py
|
apps/package/templatetags/package_tags.py
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
Clean up some imports in the package app's template_tags.py file.
|
Clean up some imports in the package app's template_tags.py file.
|
Python
|
mit
|
nanuxbe/djangopackages,QLGu/djangopackages,cartwheelweb/packaginator,nanuxbe/djangopackages,nanuxbe/djangopackages,miketheman/opencomparison,QLGu/djangopackages,benracine/opencomparison,cartwheelweb/packaginator,audreyr/opencomparison,pydanny/djangopackages,miketheman/opencomparison,QLGu/djangopackages,benracine/opencomparison,cartwheelweb/packaginator,pydanny/djangopackages,pydanny/djangopackages,audreyr/opencomparison
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
Clean up some imports in the package app's template_tags.py file.
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
<commit_before>from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
<commit_msg>Clean up some imports in the package app's template_tags.py file.<commit_after>
|
from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
Clean up some imports in the package app's template_tags.py file.from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
<commit_before>from datetime import timedelta
from datetime import datetime
from django import template
from github2.client import Github
from package.models import Package, Commit
register = template.Library()
github = Github()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
<commit_msg>Clean up some imports in the package app's template_tags.py file.<commit_after>from datetime import datetime, timedelta
from django import template
from package.models import Commit
register = template.Library()
@register.filter
def commits_over_52(package):
current = datetime.now()
weeks = []
commits = Commit.objects.filter(package=package).values_list('commit_date', flat=True)
for week in range(52):
weeks.append(len([x for x in commits if x < current and x > (current - timedelta(7))]))
current -= timedelta(7)
weeks.reverse()
weeks = map(str, weeks)
return ','.join(weeks)
|
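The template tag in the record above walks backwards one week at a time and counts commits inside each 7-day window. A self-contained sketch of the same bucketing over plain datetimes (the sample dates are illustrative):

from datetime import datetime, timedelta

commit_dates = [datetime.now() - timedelta(days=d) for d in (1, 3, 10, 40)]
current = datetime.now()
weeks = []
for _ in range(52):
    weeks.append(sum(1 for c in commit_dates
                     if current - timedelta(7) < c < current))
    current -= timedelta(7)
weeks.reverse()  # oldest week first, matching the template tag's output
print(','.join(map(str, weeks)))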
b034eeda25fcf55e7da018f3c91a23a5e252ae2f
|
bm/app/models.py
|
bm/app/models.py
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.user) + ' ' + str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
Change str() of Category for easier form handling
|
Change str() of Category for easier form handling
|
Python
|
mit
|
GSC-RNSIT/bookmark-manager,rohithpr/bookmark-manager,rohithpr/bookmark-manager,GSC-RNSIT/bookmark-manager
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.user) + ' ' + str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)Change str() of Category for easier form handling
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
<commit_before>from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.user) + ' ' + str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)<commit_msg>Change str() of Category for easier form handling<commit_after>
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.user) + ' ' + str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)Change str() of Category for easier form handlingfrom django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
<commit_before>from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.user) + ' ' + str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)<commit_msg>Change str() of Category for easier form handling<commit_after>from django.db import models
from django.conf import settings
class Category(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
name = models.CharField(max_length=21)
row_number = models.IntegerField(default=0)
column_number = models.IntegerField(default=0)
progress_bar_color = models.CharField(max_length=6, default="335544")
# hidden = models.BooleanField(default=False)
# trash = models.BooleanField(default=False)
def __str__(self):
return str(self.name)
class Bookmark(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
row_number = models.IntegerField(default=0)
glyphicon = models.CharField(max_length=30, default="asterisk")
def __str__(self):
return str(self.category) + ' ' + str(self.name)
class Trash(models.Model):
category = models.ForeignKey(Category)
name = models.CharField(max_length=50)
link = models.TextField()
glyphicon = models.CharField(max_length=30)
def __str__(self):
return str(self.category) + ' ' + str(self.name)
|
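Why the shorter __str__ in the record above helps forms: Django's ModelChoiceField labels each choice with str(obj) by default, so dropping the user prefix makes a category dropdown show just the name. A plain-Python sketch of the effect, outside the ORM:

class Category(object):  # plain stand-in for the Django model
    def __init__(self, user, name):
        self.user, self.name = user, name

    def __str__(self):
        return str(self.name)  # what a ModelChoiceField renders per choice

print(str(Category('alice', 'Reading')))  # -> 'Reading', no user prefix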
f0b705f1fde5ac33a58ccd56ed67bfcbecf0daf3
|
djstripe/management/commands/djstripe_sync_customers.py
|
djstripe/management/commands/djstripe_sync_customers.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.username, user.pk
)
)
sync_customer(user)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.get_username(), user.pk
)
)
sync_customer(user)
|
Use user.get_username() instead of user.username
|
Use user.get_username() instead of user.username
To support custom User models
|
Python
|
bsd-3-clause
|
andrewyoung1991/dj-stripe,areski/dj-stripe,benmurden/dj-stripe,koobs/dj-stripe,areski/dj-stripe,tkwon/dj-stripe,kavdev/dj-stripe,aliev/dj-stripe,ctrengove/dj-stripe,cjrh/dj-stripe,StErMi/dj-stripe,mthornhill/dj-stripe,ctrengove/dj-stripe,cjrh/dj-stripe,iddqd1/dj-stripe,dj-stripe/dj-stripe,maxmalynowsky/django-stripe-rest,photocrowd/dj-stripe,davidgillies/dj-stripe,doctorwidget/dj-stripe,davidgillies/dj-stripe,andrewyoung1991/dj-stripe,mwarkentin/dj-stripe,jpadilla/dj-stripe,aliev/dj-stripe,benmurden/dj-stripe,jameshiew/dj-stripe,mwarkentin/dj-stripe,mthornhill/dj-stripe,LaunchlabAU/dj-stripe,tkwon/dj-stripe,jameshiew/dj-stripe,kavdev/dj-stripe,jleclanche/dj-stripe,doctorwidget/dj-stripe,iddqd1/dj-stripe,photocrowd/dj-stripe,LaunchlabAU/dj-stripe,koobs/dj-stripe,jleclanche/dj-stripe,jpadilla/dj-stripe,StErMi/dj-stripe,pydanny/dj-stripe,dj-stripe/dj-stripe,pydanny/dj-stripe
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.username, user.pk
)
)
sync_customer(user)
Use user.get_username() instead of user.username
To support custom User models
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.get_username(), user.pk
)
)
sync_customer(user)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.username, user.pk
)
)
sync_customer(user)
<commit_msg>Use user.get_username() instead of user.username
To support custom User models<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.get_username(), user.pk
)
)
sync_customer(user)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.username, user.pk
)
)
sync_customer(user)
Use user.get_username() instead of user.username
To support custom User models# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.get_username(), user.pk
)
)
sync_customer(user)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.username, user.pk
)
)
sync_customer(user)
<commit_msg>Use user.get_username() instead of user.username
To support custom User models<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from ...settings import get_user_model
from ...sync import sync_customer
User = get_user_model()
class Command(BaseCommand):
help = "Sync customer data with stripe"
def handle(self, *args, **options):
qs = User.objects.exclude(customer__isnull=True)
count = 0
total = qs.count()
for user in qs:
count += 1
perc = int(round(100 * (float(count) / float(total))))
print(
"[{0}/{1} {2}%] Syncing {3} [{4}]".format(
count, total, perc, user.get_username(), user.pk
)
)
sync_customer(user)
|
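Why this works for custom User models: Django's get_username() resolves whatever attribute USERNAME_FIELD names, so the sync command no longer assumes a `username` column exists. A minimal sketch of a custom model where the old code would break (hypothetical; EmailUser is not part of dj-stripe):

from django.contrib.auth.models import AbstractBaseUser
from django.db import models

class EmailUser(AbstractBaseUser):
    # There is no `username` attribute here, so user.username raises
    # AttributeError, while user.get_username() returns self.email
    # because USERNAME_FIELD points at it.
    email = models.EmailField(unique=True)

    USERNAME_FIELD = 'email'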
bf17a86bccf25ead90d11dd15a900cb784d9cb9f
|
raco/myrial/myrial_test.py
|
raco/myrial/myrial_test.py
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
Revert "Add compile_to_json invocation in Myrial test fixture"
|
Revert "Add compile_to_json invocation in Myrial test fixture"
This reverts commit ceb848021d5323b5bad8518ac7ed850a51fc89ca.
|
Python
|
bsd-3-clause
|
uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco,uwescience/raco
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
Revert "Add compile_to_json invocation in Myrial test fixture"
This reverts commit ceb848021d5323b5bad8518ac7ed850a51fc89ca.
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
<commit_before>
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
<commit_msg>Revert "Add compile_to_json invocation in Myrial test fixture"
This reverts commit ceb848021d5323b5bad8518ac7ed850a51fc89ca.<commit_after>
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
Revert "Add compile_to_json invocation in Myrial test fixture"
This reverts commit ceb848021d5323b5bad8518ac7ed850a51fc89ca.
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
<commit_before>
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
from raco.myrialang import compile_to_json
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
json = compile_to_json(query, '', [('A', plan)])
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
<commit_msg>Revert "Add compile_to_json invocation in Myrial test fixture"
This reverts commit ceb848021d5323b5bad8518ac7ed850a51fc89ca.<commit_after>
import collections
import math
import unittest
import raco.fakedb
import raco.myrial.interpreter as interpreter
import raco.myrial.parser as parser
class MyrialTestCase(unittest.TestCase):
def setUp(self):
self.db = raco.fakedb.FakeDatabase()
self.parser = parser.Parser()
self.processor = interpreter.StatementProcessor(self.db)
def execute_query(self, query, test_logical=False):
'''Run a test query against the fake database'''
statements = self.parser.parse(query)
self.processor.evaluate(statements)
if test_logical:
plan = self.processor.get_logical_plan()
else:
plan = self.processor.get_physical_plan()
self.db.evaluate(plan)
return self.db.get_temp_table('__OUTPUT0__')
def run_test(self, query, expected, test_logical=False):
'''Execute a test query with an expected output'''
actual = self.execute_query(query, test_logical)
self.assertEquals(actual, expected)
|
2bdc5c33b1e9eb394eb62533f4ae4df081ea1452
|
numpy/setup.py
|
numpy/setup.py
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('_array_api')
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
Make the _array_api submodule install correctly
|
Make the _array_api submodule install correctly
|
Python
|
bsd-3-clause
|
seberg/numpy,mhvk/numpy,mattip/numpy,endolith/numpy,seberg/numpy,mattip/numpy,numpy/numpy,simongibbons/numpy,simongibbons/numpy,charris/numpy,endolith/numpy,charris/numpy,rgommers/numpy,rgommers/numpy,simongibbons/numpy,anntzer/numpy,pdebuyl/numpy,jakirkham/numpy,endolith/numpy,mattip/numpy,anntzer/numpy,jakirkham/numpy,seberg/numpy,seberg/numpy,jakirkham/numpy,charris/numpy,mhvk/numpy,pdebuyl/numpy,simongibbons/numpy,numpy/numpy,numpy/numpy,mattip/numpy,mhvk/numpy,numpy/numpy,anntzer/numpy,endolith/numpy,rgommers/numpy,rgommers/numpy,simongibbons/numpy,anntzer/numpy,pdebuyl/numpy,pdebuyl/numpy,charris/numpy,jakirkham/numpy,jakirkham/numpy,mhvk/numpy,mhvk/numpy
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
Make the _array_api submodule install correctly
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('_array_api')
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
<commit_before>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
<commit_msg>Make the _array_api submodule install correctly<commit_after>
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('_array_api')
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
Make the _array_api submodule install correctly#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('_array_api')
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
<commit_before>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
<commit_msg>Make the _array_api submodule install correctly<commit_after>#!/usr/bin/env python3
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('numpy', parent_package, top_path)
config.add_subpackage('_array_api')
config.add_subpackage('compat')
config.add_subpackage('core')
config.add_subpackage('distutils')
config.add_subpackage('doc')
config.add_subpackage('f2py')
config.add_subpackage('fft')
config.add_subpackage('lib')
config.add_subpackage('linalg')
config.add_subpackage('ma')
config.add_subpackage('matrixlib')
config.add_subpackage('polynomial')
config.add_subpackage('random')
config.add_subpackage('testing')
config.add_subpackage('typing')
config.add_data_dir('doc')
config.add_data_files('py.typed')
config.add_data_files('*.pyi')
config.add_subpackage('tests')
config.make_config_py() # installs __config__.py
return config
if __name__ == '__main__':
print('This is the wrong setup.py file to run')
|
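add_subpackage('_array_api') tells numpy.distutils to recurse into numpy/_array_api/ at build time, and by the convention used for the other entries each such directory carries its own setup.py. A minimal version matching that convention could look like this (sketch only; the actual numpy/_array_api/setup.py is not shown in this diff):

def configuration(parent_package='', top_path=None):
    from numpy.distutils.misc_util import Configuration
    # Register the package itself; data files or C extensions would be
    # added here if the subpackage had any.
    config = Configuration('_array_api', parent_package, top_path)
    return config

if __name__ == '__main__':
    from numpy.distutils.core import setup
    setup(configuration=configuration)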
29cc95bbdb12e50d09e8079bfae5841a7e734743
|
plinth/modules/help/urls.py
|
plinth/modules/help/urls.py
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', views.index, name='index'),
url(r'^help/index/$', views.index, name='index_explicit'),
url(r'^help/about/$', views.about, name='about'),
url(r'^help/manual/$', views.manual, name='manual'),
url(r'^help/status-log/$', views.status_log, name='status-log'),
]
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from plinth.utils import non_admin_view
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', non_admin_view(views.index), name='index'),
url(r'^help/index/$', non_admin_view(views.index), name='index_explicit'),
url(r'^help/about/$', non_admin_view(views.about), name='about'),
url(r'^help/manual/$', non_admin_view(views.manual), name='manual'),
url(r'^help/status-log/$', non_admin_view(views.status_log), name='status-log'),
]
|
Make help accessible for logged-in non-admin users
|
Make help accessible for logged-in non-admin users
Signed-off-by: Hemanth Kumar Veeranki <hemanthveeranki@gmail.com>
Reviewed-by: Johannes Keyser <187051b70230423a457adbc3e507f9e4fff08d4b@posteo.de>
|
Python
|
agpl-3.0
|
vignanl/Plinth,vignanl/Plinth,kkampardi/Plinth,vignanl/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth,kkampardi/Plinth,vignanl/Plinth,kkampardi/Plinth,harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,harry-7/Plinth,harry-7/Plinth
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', views.index, name='index'),
url(r'^help/index/$', views.index, name='index_explicit'),
url(r'^help/about/$', views.about, name='about'),
url(r'^help/manual/$', views.manual, name='manual'),
url(r'^help/status-log/$', views.status_log, name='status-log'),
]
Make help accessible for logged-in non-admin users
Signed-off-by: Hemanth Kumar Veeranki <hemanthveeranki@gmail.com>
Reviewed-by: Johannes Keyser <187051b70230423a457adbc3e507f9e4fff08d4b@posteo.de>
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from plinth.utils import non_admin_view
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', non_admin_view(views.index), name='index'),
url(r'^help/index/$', non_admin_view(views.index), name='index_explicit'),
url(r'^help/about/$', non_admin_view(views.about), name='about'),
url(r'^help/manual/$', non_admin_view(views.manual), name='manual'),
url(r'^help/status-log/$', non_admin_view(views.status_log), name='status-log'),
]
|
<commit_before>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', views.index, name='index'),
url(r'^help/index/$', views.index, name='index_explicit'),
url(r'^help/about/$', views.about, name='about'),
url(r'^help/manual/$', views.manual, name='manual'),
url(r'^help/status-log/$', views.status_log, name='status-log'),
]
<commit_msg>Make help accessible for logged-in non-admin users
Signed-off-by: Hemanth Kumar Veeranki <hemanthveeranki@gmail.com>
Reviewed-by: Johannes Keyser <187051b70230423a457adbc3e507f9e4fff08d4b@posteo.de><commit_after>
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from plinth.utils import non_admin_view
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', non_admin_view(views.index), name='index'),
url(r'^help/index/$', non_admin_view(views.index), name='index_explicit'),
url(r'^help/about/$', non_admin_view(views.about), name='about'),
url(r'^help/manual/$', non_admin_view(views.manual), name='manual'),
url(r'^help/status-log/$', non_admin_view(views.status_log), name='status-log'),
]
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', views.index, name='index'),
url(r'^help/index/$', views.index, name='index_explicit'),
url(r'^help/about/$', views.about, name='about'),
url(r'^help/manual/$', views.manual, name='manual'),
url(r'^help/status-log/$', views.status_log, name='status-log'),
]
Make help accessible for logged-in non-admin users
Signed-off-by: Hemanth Kumar Veeranki <hemanthveeranki@gmail.com>
Reviewed-by: Johannes Keyser <187051b70230423a457adbc3e507f9e4fff08d4b@posteo.de>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from plinth.utils import non_admin_view
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', non_admin_view(views.index), name='index'),
url(r'^help/index/$', non_admin_view(views.index), name='index_explicit'),
url(r'^help/about/$', non_admin_view(views.about), name='about'),
url(r'^help/manual/$', non_admin_view(views.manual), name='manual'),
url(r'^help/status-log/$', non_admin_view(views.status_log), name='status-log'),
]
|
<commit_before>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', views.index, name='index'),
url(r'^help/index/$', views.index, name='index_explicit'),
url(r'^help/about/$', views.about, name='about'),
url(r'^help/manual/$', views.manual, name='manual'),
url(r'^help/status-log/$', views.status_log, name='status-log'),
]
<commit_msg>Make help accessible for logged-in non-admin users
Signed-off-by: Hemanth Kumar Veeranki <hemanthveeranki@gmail.com>
Reviewed-by: Johannes Keyser <187051b70230423a457adbc3e507f9e4fff08d4b@posteo.de><commit_after>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
URLs for the Help module
"""
from django.conf.urls import url
from plinth.utils import non_admin_view
from . import help as views
urlpatterns = [
# having two urls for one page is a hack to help the current url/menu
# system highlight the correct menu item. Every submenu-item with the same
# url prefix as the main-menu is highlighted automatically.
url(r'^help/$', non_admin_view(views.index), name='index'),
url(r'^help/index/$', non_admin_view(views.index), name='index_explicit'),
url(r'^help/about/$', non_admin_view(views.about), name='about'),
url(r'^help/manual/$', non_admin_view(views.manual), name='manual'),
url(r'^help/status-log/$', non_admin_view(views.status_log), name='status-log'),
]
|
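The diff shows only the call sites; plinth.utils.non_admin_view itself is not part of this change. Per the commit message its effect is to expose these views to any authenticated user rather than admins only, so an equivalent wrapper could be sketched as follows (assumption; Plinth's real implementation may differ):

from django.contrib.auth.decorators import login_required

def non_admin_view(view):
    """Hypothetical stand-in: require login but skip any admin-group check."""
    return login_required(view)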
5edddcc85b0e21bb576b71db63d082c8ace5cf70
|
examples/boilerplates/samples/google_test.py
|
examples/boilerplates/samples/google_test.py
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.assert_element(ResultsPage.google_logo)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
Update Google boilerplate test. (Logo frequently changes)
|
Update Google boilerplate test. (Logo frequently changes)
|
Python
|
mit
|
seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.assert_element(ResultsPage.google_logo)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
Update Google boilerplate test. (Logo frequently changes)
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
<commit_before>'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.assert_element(ResultsPage.google_logo)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
<commit_msg>Update Google boilerplate test. (Logo frequently changes)<commit_after>
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.assert_element(ResultsPage.google_logo)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
Update Google boilerplate test. (Logo frequently changes)'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
<commit_before>'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.assert_element(ResultsPage.google_logo)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
<commit_msg>Update Google boilerplate test. (Logo frequently changes)<commit_after>'''
Google.com testing example
'''
from seleniumbase import BaseCase
from google_objects import HomePage, ResultsPage
class GoogleTests(BaseCase):
def test_google_dot_com(self):
self.open('http://www.google.com')
self.assert_element(HomePage.search_button)
self.assert_element(HomePage.feeling_lucky_button)
self.update_text(HomePage.search_box, "github\n")
self.assert_text("github.com", ResultsPage.search_results)
self.click_link_text("Images")
self.assert_element('img[alt="Image result for github"]')
|
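The removed line asserted on Google's logo, which rotates with doodles and makes the test flaky. If a structural check is still wanted, anchoring on page mechanics rather than branding tends to be more stable, for example (hypothetical selector, not part of the boilerplate and untested against the live page):

from seleniumbase import BaseCase

class GoogleHomePageTest(BaseCase):
    def test_search_form_present(self):
        self.open('http://www.google.com')
        # The search form's action attribute changes far less often
        # than the logo artwork swapped in for doodles.
        self.assert_element('form[action="/search"]')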
4b065401e3679d479163bd825c0a3526f0ba1bdf
|
conf-template.py
|
conf-template.py
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
debug = False
|
Add default for debug in conf template
|
Add default for debug in conf template
|
Python
|
bsd-3-clause
|
TimSC/auxgis
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
Add default for debug in conf template
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
debug = False
|
<commit_before>
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
<commit_msg>Add default for debug in conf template<commit_after>
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
debug = False
|
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
Add default for debug in conf template
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
debug = False
|
<commit_before>
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
<commit_msg>Add default for debug in conf template<commit_after>
flickrKey = ""
flickrSecret = ""
recaptchaEnabled = False
recaptchaKey = ""
recaptchaSecret = ""
debug = False
|
2c87cc9a864cbcd0ff0668a348fda1ba82b8a74c
|
gensysinfo.py
|
gensysinfo.py
|
#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent()))
time.sleep(20)
|
#!/usr/bin/env python3
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent() / 100))
time.sleep(20)
|
Convert percentage to a value between 0 and 1
|
Convert percentage to a value between 0 and 1
|
Python
|
mit
|
wilfriedvanasten/miscvar,wilfriedvanasten/miscvar,wilfriedvanasten/miscvar
|
#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent()))
time.sleep(20)
Convert percentage to a value between 0 and 1
|
#!/usr/bin/env python3
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent() / 100))
time.sleep(20)
|
<commit_before>#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent()))
time.sleep(20)
<commit_msg>Convert percentage to a value between 0 and 1<commit_after>
|
#!/usr/bin/env python3
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent() / 100))
time.sleep(20)
|
#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent()))
time.sleep(20)
Convert percentage to a value between 0 and 1#!/usr/bin/env python3
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent() / 100))
time.sleep(20)
|
<commit_before>#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent()))
time.sleep(20)
<commit_msg>Convert percentage to a value between 0 and 1<commit_after>#!/usr/bin/env python3
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent() / 100))
time.sleep(20)
|
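The division matters because psutil.cpu_percent() returns a value in 0-100 while create_bar() expects a 0-1 fraction; passing 37.0 unscaled lands in the `filled > 1` overload branch and draws digits instead of a gauge. Two illustrative calls against the function as written:

print(create_bar(0.37))  # '#[fg=green][|||.......]#[fg=default]'
print(create_bar(1.25))  # '#[fg=green][2211111111]#[fg=default]' (digits flag >100%)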
50fc79e1231eeb59f66a79c29abe2b843e6fd893
|
atlas/api/__init__.py
|
atlas/api/__init__.py
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if current_app.debug: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if log.getEffectiveLevel() == logging.DEBUG: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
Use LOG_LEVEL to enable/disable request logging
|
Use LOG_LEVEL to enable/disable request logging
|
Python
|
mit
|
joshfriend/atlas,joshfriend/atlas
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if current_app.debug: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
Use LOG_LEVEL to enable/disable request logging
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if log.getEffectiveLevel() == logging.DEBUG: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
<commit_before>#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if current_app.debug: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
<commit_msg>Use LOG_LEVEL to enable/disable request logging<commit_after>
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if log.getEffectiveLevel() == logging.DEBUG: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if current_app.debug: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
Use LOG_LEVEL to enable/disable request logging#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if log.getEffectiveLevel() == logging.DEBUG: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
<commit_before>#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if current_app.debug: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
<commit_msg>Use LOG_LEVEL to enable/disable request logging<commit_after>#!/usr/bin/env python
import logging
from flask import Blueprint, current_app, request
api_v1_blueprint = Blueprint("api_v1", __name__, url_prefix='/api/v1')
log = logging.getLogger('api')
@api_v1_blueprint.after_request
def log_response(response):
"""Log any requests/responses with an error code"""
if log.getEffectiveLevel() == logging.DEBUG: # pragma: no cover, debugging only
log.debug('%7s: %s - %i', request.method, request.url,
response.status_code)
if response.status_code >= 400:
log.debug('Response data: \n%s', response.data)
log.debug('Request data: \n%s', request.data)
return response
# Import the resources to add the routes to the blueprint before the app is
# initialized
from . import webhook
|
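A minimal standalone sketch (not from the atlas codebase) of why log.getEffectiveLevel() works as the gate in this commit: an unconfigured logger inherits its effective level from its ancestors, so a LOG_LEVEL applied to the root logger toggles the request dump without enabling Flask's debug mode.
import logging
logging.basicConfig(level=logging.INFO)  # stand-in for LOG_LEVEL=INFO
log = logging.getLogger('api')           # no level set on 'api' itself
assert log.getEffectiveLevel() == logging.INFO   # inherited from the root logger
log.setLevel(logging.DEBUG)                      # stand-in for LOG_LEVEL=DEBUG
assert log.getEffectiveLevel() == logging.DEBUG  # the after_request dump would now fire
|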
92c58061d4460addbe75081090371814e1a5f580
|
working_waterfronts/working_waterfronts_api/views/pointsofinterest.py
|
working_waterfronts/working_waterfronts_api/views/pointsofinterest.py
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pointsofinterest/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pois/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
Change docstring to reflect actual endpoint
|
Change docstring to reflect actual endpoint
|
Python
|
apache-2.0
|
osu-cass/working-waterfronts-api,osu-cass/working-waterfronts-api,osu-cass/working-waterfronts-api
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pointsofinterest/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
Change docstring to reflect actual endpoint
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pois/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
<commit_before>from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pointsofinterest/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
<commit_msg>Change docstring to reflect actual endpoint<commit_after>
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pois/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pointsofinterest/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
Change docstring to reflect actual endpointfrom django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pois/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
<commit_before>from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pointsofinterest/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
<commit_msg>Change docstring to reflect actual endpoint<commit_after>from django.http import (HttpResponse,
HttpResponseNotFound)
from django.contrib.gis.measure import D
from working_waterfronts.working_waterfronts_api.models import PointOfInterest
from working_waterfronts.working_waterfronts_api.functions import get_lat_long_prox
import json
from .serializer import ObjectSerializer
def poi_list(request):
"""
*/pois/*
List all pointsofinterest in the database. There is no order to this list,
only whatever is returned by the database.
"""
error = {
'status': False,
'name': None,
'text': None,
'level': None,
'debug': None
}
data = {}
point, proximity, limit, error = get_lat_long_prox(request, error)
if point:
poi_list = PointOfInterest.objects.filter(
location__distance_lte=(point, D(mi=proximity)))[:limit]
else:
poi_list = PointOfInterest.objects.all()[:limit]
if not poi_list:
error = {
"status": True,
"name": "No PointsOfInterest",
"text": "No PointsOfInterest found",
"level": "Information",
"debug": ""
}
serializer = ObjectSerializer()
data = {
"pointsofinterest": json.loads(
serializer.serialize(
poi_list,
use_natural_foreign_keys=True
)
),
"error": error
}
return HttpResponse(json.dumps(data), content_type="application/json")
|
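For context, a hypothetical sketch of what a helper like get_lat_long_prox might do; the real implementation lives in working_waterfronts_api.functions, and the parameter names and defaults used here (lat, lng, proximity=5, limit) are assumptions, not the project's actual API.
from django.contrib.gis.geos import Point
def get_lat_long_prox(request, error):
    """Parse ?lat=&lng=&proximity=&limit= into (point, proximity_mi, limit, error)."""
    try:
        lat = request.GET.get('lat')
        lng = request.GET.get('lng')
        proximity = int(request.GET.get('proximity', 5))  # assumed default radius in miles
        limit = int(request.GET.get('limit', 0)) or None  # None slices to 'no limit'
        point = Point(float(lng), float(lat)) if lat and lng else None  # GEOS wants (x=lng, y=lat)
    except ValueError as e:
        error.update(status=True, name='Bad parameter',
                     text='Invalid lat/lng, proximity, or limit',
                     level='Error', debug=str(e))
        return None, None, None, error
    return point, proximity, limit, error
|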
7b88a2e65b0010ceef49fdbce61949ee10420cd8
|
desertbot/modules/utils/CommandHandler.py
|
desertbot/modules/utils/CommandHandler.py
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def __init__(self):
BotModule.__init__(self)
self.loadingPriority = 10
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
Load the command handler before the commands
|
Load the command handler before the commands
|
Python
|
mit
|
DesertBot/DesertBot
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
Load the command handler before the commands
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def __init__(self):
BotModule.__init__(self)
self.loadingPriority = 10
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
<commit_before>"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
<commit_msg>Load the command handler before the commands<commit_after>
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def __init__(self):
BotModule.__init__(self)
self.loadingPriority = 10
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
Load the command handler before the commands"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def __init__(self):
BotModule.__init__(self)
self.loadingPriority = 10
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
<commit_before>"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
<commit_msg>Load the command handler before the commands<commit_after>"""
Created on Feb 28, 2018
@author: StarlitGhost
"""
from twisted.plugin import IPlugin
from desertbot.moduleinterface import IModule, BotModule
from zope.interface import implementer
@implementer(IPlugin, IModule)
class CommandHandler(BotModule):
def __init__(self):
BotModule.__init__(self)
self.loadingPriority = 10
def actions(self):
return super(CommandHandler, self).actions() + [('message-channel', 1, self.handleCommand),
('message-user', 1, self.handleCommand)]
def handleCommand(self, message):
if message.command:
return self.bot.moduleHandler.runGatheringAction('botmessage', message)
commandhandler = CommandHandler()
|
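The new loadingPriority attribute only matters if the module loader sorts on it; a hypothetical illustration (not DesertBot's actual ModuleHandler), assuming higher values load earlier so CommandHandler's actions are registered before the command modules that depend on them:
modules = [('Commands', 0), ('CommandHandler', 10), ('Karma', 0)]
load_order = sorted(modules, key=lambda m: m[1], reverse=True)
assert load_order[0][0] == 'CommandHandler'  # loads first under this assumption
|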
31921ce5ca7ccbaa2db8b8fa11b2b9a6caa14aeb
|
daisyproducer/settings.py
|
daisyproducer/settings.py
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
if SERVE_STATIC_FILES:
INSTALLED_APPS += ('django.contrib.staticfiles',)
|
Use django.contrib.staticfiles when running locally
|
Use django.contrib.staticfiles when running locally
so that the admin interface uses the proper css when running on the
dev machine
|
Python
|
agpl-3.0
|
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
Use django.contrib.staticfiles when running locally
so that the admin interface uses the proper css when running on the
dev machine
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
if SERVE_STATIC_FILES:
INSTALLED_APPS += ('django.contrib.staticfiles',)
|
<commit_before>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Use django.contrib.staticfiles when running locally
so that the admin interface uses the proper css when running on the
dev machine<commit_after>
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
if SERVE_STATIC_FILES:
INSTALLED_APPS += ('django.contrib.staticfiles',)
|
from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
Use django.contrib.staticfiles when running locally
so that the admin interface uses the proper css when running on the
dev machinefrom settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
if SERVE_STATIC_FILES:
INSTALLED_APPS += ('django.contrib.staticfiles',)
|
<commit_before>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Use django.contrib.staticfiles when running locally
so that the admin interface uses the proper css when running on the
dev machine<commit_after>from settings_common import *
PACKAGE_VERSION = "0.5"
DEBUG = TEMPLATE_DEBUG = True
DAISY_PIPELINE_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp', 'pipeline')
EXTERNAL_PATH = os.path.join(PROJECT_DIR, '..', '..', '..', 'tmp')
SERVE_STATIC_FILES = True
# the following is an idea from https://code.djangoproject.com/wiki/SplitSettings
# We have both local settings and common settings. They are used as follows:
# - common settings are shared data between normal settings and unit test settings
# - local settings are used on productive servers to keep the local
# settings such as db passwords, etc out of version control
try:
from settings_local import *
except ImportError:
pass
if SERVE_STATIC_FILES:
INSTALLED_APPS += ('django.contrib.staticfiles',)
|
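The placement of the new check is the interesting part: it runs after the settings_local import, so a production deployment can opt out of serving static files itself. A sketch of what such a file might contain (illustrative only; the real settings_local.py is deliberately kept out of version control):
# settings_local.py
SERVE_STATIC_FILES = False  # a front-end web server serves static assets instead
DEBUG = TEMPLATE_DEBUG = False
|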
3b75a6f3654e8f325060779ca56b6df93fe0cabe
|
genome_designer/main/demo_view_overrides.py
|
genome_designer/main/demo_view_overrides.py
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
return HttpResponseRedirect("/")
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
redirect_url = request.GET.get('next', '/')
return HttpResponseRedirect(redirect_url)
|
Handle redirect_url in demo login bypass.
|
Handle redirect_url in demo login bypass.
|
Python
|
mit
|
churchlab/millstone,churchlab/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone,churchlab/millstone
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
return HttpResponseRedirect("/")
Handle redirect_url in demo login bypass.
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
redirect_url = request.GET.get('next', '/')
return HttpResponseRedirect(redirect_url)
|
<commit_before>"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
return HttpResponseRedirect("/")
<commit_msg>Handle redirect_url in demo login bypass.<commit_after>
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
redirect_url = request.GET.get('next', '/')
return HttpResponseRedirect(redirect_url)
|
"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
return HttpResponseRedirect("/")
Handle redirect_url in demo login bypass."""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
redirect_url = request.GET.get('next', '/')
return HttpResponseRedirect(redirect_url)
|
<commit_before>"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
return HttpResponseRedirect("/")
<commit_msg>Handle redirect_url in demo login bypass.<commit_after>"""View overrides for demo mode.
"""
from django.contrib.auth import authenticate
from django.contrib.auth import login
from django.http import HttpResponseRedirect
def login_demo_account(request):
new_user = authenticate(username='gmcdev',
password='g3n3d3z')
login(request, new_user)
redirect_url = request.GET.get('next', '/')
return HttpResponseRedirect(redirect_url)
|
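A usage sketch of the changed line, using Django's RequestFactory (test-only tooling, not part of this module): login-required redirects conventionally append ?next=<original path>, which the bypass now honors, falling back to '/' when the parameter is absent.
from django.test import RequestFactory
request = RequestFactory().get('/demo-login/', {'next': '/projects/'})  # hypothetical URL
redirect_url = request.GET.get('next', '/')
assert redirect_url == '/projects/'
One caveat worth noting: the value of next is used unvalidated, which is an open-redirect risk; later Django versions ship url_has_allowed_host_and_scheme (formerly is_safe_url) to check it first.
|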