commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
489129bd26f72967fbbc279ab488fe047714a250
|
qual/tests/test_iso.py
|
qual/tests/test_iso.py
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes())
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
Use timezones=[] to underline that we only care about dates.
|
Use timezones=[] to underline that we only care about dates.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes())
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
Use timezones=[] to underline that we only care about dates.
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
<commit_before>import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes())
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
<commit_msg>Use timezones=[] to underline that we only care about dates.<commit_after>
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes())
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
Use timezones=[] to underline that we only care about dates.import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
<commit_before>import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes())
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
<commit_msg>Use timezones=[] to underline that we only care about dates.<commit_after>import unittest
from hypothesis import given
from hypothesis.extra.datetime import datetimes
import qual
from datetime import date
class TestIsoUtils(unittest.TestCase):
@given(datetimes(timezones=[]))
def test_round_trip_date(self, dt):
d = dt.date()
self.assertEqual(qual.iso_to_gregorian(*d.isocalendar()), d)
|
b72f0ed25750a29b5dc1cdd2790102d8351606f9
|
pronto_feedback/feedback/views.py
|
pronto_feedback/feedback/views.py
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
next(reader, None)
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
reader.next()
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
Use next method of reader object
|
Use next method of reader object
|
Python
|
mit
|
zkan/pronto-feedback,zkan/pronto-feedback
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
next(reader, None)
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
Use next method of reader object
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
reader.next()
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
<commit_before>import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
next(reader, None)
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
<commit_msg>Use next method of reader object<commit_after>
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
reader.next()
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
next(reader, None)
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
Use next method of reader objectimport csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
reader.next()
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
<commit_before>import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
next(reader, None)
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
<commit_msg>Use next method of reader object<commit_after>import csv
from datetime import datetime
from django.shortcuts import render
from django.utils import timezone
from django.views.generic import TemplateView
from .forms import FeedbackUploadForm
from .models import Feedback
class FeedbackView(TemplateView):
template_name = 'index.html'
def get(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm()
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
def save_feedback(self, data):
creation_date = datetime.strptime(
data[2],
'%m/%d/%Y %H:%M'
).replace(tzinfo=timezone.utc)
Feedback.objects.create(
fid=data[0],
creation_date=creation_date,
question_asked=data[4],
message=data[5]
)
def post(self, request):
feedback = Feedback.objects.all()
form = FeedbackUploadForm(request.POST, request.FILES)
if form.is_valid():
reader = csv.reader(request.FILES['file_upload'])
reader.next()
map(self.save_feedback, reader)
return render(
request,
self.template_name,
{
'form': form,
'feedback': feedback
}
)
|
8780d2eb3d7782e7f1e6c23e2e428a72e6bd3dad
|
server/kcaa/manipulator_util_test.py
|
server/kcaa/manipulator_util_test.py
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [0, 3600])
assert not manager.are_auto_manipulator_scheduled(0)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [[0, 3600]])
assert not manager.are_auto_manipulator_scheduled(0)
def test_are_auto_manipulator_scheduled_one_fragment(self, manager):
manager.set_auto_manipulator_schedules(True, [[0, 3600]])
assert manager.are_auto_manipulator_scheduled(0)
assert manager.are_auto_manipulator_scheduled(1800)
assert manager.are_auto_manipulator_scheduled(3599)
assert not manager.are_auto_manipulator_scheduled(3600)
assert not manager.are_auto_manipulator_scheduled(5400)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
Add a test for one schedule fragment.
|
Add a test for one schedule fragment.
|
Python
|
apache-2.0
|
kcaa/kcaa,kcaa/kcaa,kcaa/kcaa,kcaa/kcaa
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [0, 3600])
assert not manager.are_auto_manipulator_scheduled(0)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
Add a test for one schedule fragment.
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [[0, 3600]])
assert not manager.are_auto_manipulator_scheduled(0)
def test_are_auto_manipulator_scheduled_one_fragment(self, manager):
manager.set_auto_manipulator_schedules(True, [[0, 3600]])
assert manager.are_auto_manipulator_scheduled(0)
assert manager.are_auto_manipulator_scheduled(1800)
assert manager.are_auto_manipulator_scheduled(3599)
assert not manager.are_auto_manipulator_scheduled(3600)
assert not manager.are_auto_manipulator_scheduled(5400)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [0, 3600])
assert not manager.are_auto_manipulator_scheduled(0)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
<commit_msg>Add a test for one schedule fragment.<commit_after>
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [[0, 3600]])
assert not manager.are_auto_manipulator_scheduled(0)
def test_are_auto_manipulator_scheduled_one_fragment(self, manager):
manager.set_auto_manipulator_schedules(True, [[0, 3600]])
assert manager.are_auto_manipulator_scheduled(0)
assert manager.are_auto_manipulator_scheduled(1800)
assert manager.are_auto_manipulator_scheduled(3599)
assert not manager.are_auto_manipulator_scheduled(3600)
assert not manager.are_auto_manipulator_scheduled(5400)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [0, 3600])
assert not manager.are_auto_manipulator_scheduled(0)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
Add a test for one schedule fragment.#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [[0, 3600]])
assert not manager.are_auto_manipulator_scheduled(0)
def test_are_auto_manipulator_scheduled_one_fragment(self, manager):
manager.set_auto_manipulator_schedules(True, [[0, 3600]])
assert manager.are_auto_manipulator_scheduled(0)
assert manager.are_auto_manipulator_scheduled(1800)
assert manager.are_auto_manipulator_scheduled(3599)
assert not manager.are_auto_manipulator_scheduled(3600)
assert not manager.are_auto_manipulator_scheduled(5400)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [0, 3600])
assert not manager.are_auto_manipulator_scheduled(0)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
<commit_msg>Add a test for one schedule fragment.<commit_after>#!/usr/bin/env python
import pytest
import manipulator_util
class TestManipulatorManager(object):
def pytest_funcarg__manager(self, request):
return manipulator_util.ManipulatorManager(None, {}, 0)
def test_in_schedule_fragment(self):
in_schedule_fragment = (
manipulator_util.ManipulatorManager.in_schedule_fragment)
assert in_schedule_fragment(0, [0, 3600])
assert in_schedule_fragment(1800, [0, 3600])
assert in_schedule_fragment(3599, [0, 3600])
assert not in_schedule_fragment(3600, [0, 3600])
assert not in_schedule_fragment(5400, [0, 3600])
def test_are_auto_manipulator_scheduled_disabled(self, manager):
manager.set_auto_manipulator_schedules(False, [[0, 3600]])
assert not manager.are_auto_manipulator_scheduled(0)
def test_are_auto_manipulator_scheduled_one_fragment(self, manager):
manager.set_auto_manipulator_schedules(True, [[0, 3600]])
assert manager.are_auto_manipulator_scheduled(0)
assert manager.are_auto_manipulator_scheduled(1800)
assert manager.are_auto_manipulator_scheduled(3599)
assert not manager.are_auto_manipulator_scheduled(3600)
assert not manager.are_auto_manipulator_scheduled(5400)
def main():
import doctest
doctest.testmod(manipulator_util)
pytest.main(args=[__file__.replace('.pyc', '.py')])
if __name__ == '__main__':
main()
|
3987f059632c64058862425407cdc165d4f3182b
|
python/qisrc/test/test_qisrc_list.py
|
python/qisrc/test/test_qisrc_list.py
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
def test_list_with_pattern(qisrc_action, record_messages):
qisrc_action.git_worktree.create_git_project("foo")
qisrc_action.git_worktree.create_git_project("baz")
qisrc_action.git_worktree.create_git_project("foobar")
record_messages.reset()
qisrc_action("list", "foo.*")
assert ui.find_message("foo")
assert ui.find_message("foobar")
assert not ui.find_message("baz")
|
Add test for qisrc list
|
Add test for qisrc list
Change-Id: I04c08f60044ffb0ba2ff63141d085e4dc2545455
|
Python
|
bsd-3-clause
|
dmerejkowsky/qibuild,aldebaran/qibuild,aldebaran/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,aldebaran/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,aldebaran/qibuild
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
Add test for qisrc list
Change-Id: I04c08f60044ffb0ba2ff63141d085e4dc2545455
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
def test_list_with_pattern(qisrc_action, record_messages):
qisrc_action.git_worktree.create_git_project("foo")
qisrc_action.git_worktree.create_git_project("baz")
qisrc_action.git_worktree.create_git_project("foobar")
record_messages.reset()
qisrc_action("list", "foo.*")
assert ui.find_message("foo")
assert ui.find_message("foobar")
assert not ui.find_message("baz")
|
<commit_before>from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
<commit_msg>Add test for qisrc list
Change-Id: I04c08f60044ffb0ba2ff63141d085e4dc2545455<commit_after>
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
def test_list_with_pattern(qisrc_action, record_messages):
qisrc_action.git_worktree.create_git_project("foo")
qisrc_action.git_worktree.create_git_project("baz")
qisrc_action.git_worktree.create_git_project("foobar")
record_messages.reset()
qisrc_action("list", "foo.*")
assert ui.find_message("foo")
assert ui.find_message("foobar")
assert not ui.find_message("baz")
|
from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
Add test for qisrc list
Change-Id: I04c08f60044ffb0ba2ff63141d085e4dc2545455from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
def test_list_with_pattern(qisrc_action, record_messages):
qisrc_action.git_worktree.create_git_project("foo")
qisrc_action.git_worktree.create_git_project("baz")
qisrc_action.git_worktree.create_git_project("foobar")
record_messages.reset()
qisrc_action("list", "foo.*")
assert ui.find_message("foo")
assert ui.find_message("foobar")
assert not ui.find_message("baz")
|
<commit_before>from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
<commit_msg>Add test for qisrc list
Change-Id: I04c08f60044ffb0ba2ff63141d085e4dc2545455<commit_after>from qisys import ui
def test_list_tips_when_empty(qisrc_action, record_messages):
qisrc_action("init")
qisrc_action("list")
assert ui.find_message("Tips")
def test_list_with_pattern(qisrc_action, record_messages):
qisrc_action.git_worktree.create_git_project("foo")
qisrc_action.git_worktree.create_git_project("baz")
qisrc_action.git_worktree.create_git_project("foobar")
record_messages.reset()
qisrc_action("list", "foo.*")
assert ui.find_message("foo")
assert ui.find_message("foobar")
assert not ui.find_message("baz")
|
46352a3252e7827d349573c58608a1eefe163c21
|
cbagent/collectors/eventing_stats.py
|
cbagent/collectors/eventing_stats.py
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '25000'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '8096'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
Fix port number for eventing stats
|
Fix port number for eventing stats
Change-Id: Ifaa7e9957f919febb1a297683077cd1c71c6aa9d
Reviewed-on: http://review.couchbase.org/84633
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Mahesh Mandhare <f05f25073b6d9a2858c6a374b80c02d5a2bccf33@couchbase.com>
|
Python
|
apache-2.0
|
pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,pavel-paulau/perfrunner,couchbase/perfrunner,couchbase/perfrunner,couchbase/perfrunner
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '25000'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
Fix port number for eventing stats
Change-Id: Ifaa7e9957f919febb1a297683077cd1c71c6aa9d
Reviewed-on: http://review.couchbase.org/84633
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Mahesh Mandhare <f05f25073b6d9a2858c6a374b80c02d5a2bccf33@couchbase.com>
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '8096'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
<commit_before>from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '25000'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
<commit_msg>Fix port number for eventing stats
Change-Id: Ifaa7e9957f919febb1a297683077cd1c71c6aa9d
Reviewed-on: http://review.couchbase.org/84633
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Mahesh Mandhare <f05f25073b6d9a2858c6a374b80c02d5a2bccf33@couchbase.com><commit_after>
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '8096'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '25000'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
Fix port number for eventing stats
Change-Id: Ifaa7e9957f919febb1a297683077cd1c71c6aa9d
Reviewed-on: http://review.couchbase.org/84633
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Mahesh Mandhare <f05f25073b6d9a2858c6a374b80c02d5a2bccf33@couchbase.com>from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '8096'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
<commit_before>from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '25000'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
<commit_msg>Fix port number for eventing stats
Change-Id: Ifaa7e9957f919febb1a297683077cd1c71c6aa9d
Reviewed-on: http://review.couchbase.org/84633
Tested-by: Build Bot <80754af91bfb6d1073585b046fe0a474ce868509@couchbase.com>
Reviewed-by: Mahesh Mandhare <f05f25073b6d9a2858c6a374b80c02d5a2bccf33@couchbase.com><commit_after>from cbagent.collectors import Collector
class EventingStats(Collector):
COLLECTOR = "eventing_stats"
def __init__(self, settings, test):
super().__init__(settings)
self.eventing_node = test.function_nodes[0]
self.functions = test.functions
def _get_processing_stats(self, function_name="perf-test1"):
port = '8096'
uri = "/getEventProcessingStats?name={}".format(function_name)
samples = self.get_http(path=uri, server=self.eventing_node, port=port)
return samples
def sample(self):
for name, function in self.functions.items():
stats = self._get_processing_stats(function_name=name)
if stats:
self.update_metric_metadata(stats.keys(), bucket=name)
self.store.append(stats, cluster=self.cluster,
bucket=name, collector=self.COLLECTOR)
def update_metadata(self):
self.mc.add_cluster()
for name, function in self.functions.items():
self.mc.add_bucket(name)
|
bccd730eea204bd5c5ff99c919d87b13d9f25c73
|
examples/plugin_example/gwexample/analyses/tasks.py
|
examples/plugin_example/gwexample/analyses/tasks.py
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer, min=1)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
Add a minimum value to fibonacci example
|
Add a minimum value to fibonacci example
|
Python
|
apache-2.0
|
girder/girder_worker,girder/girder_worker,girder/girder_worker
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
Add a minimum value to fibonacci example
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer, min=1)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
<commit_before>from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
<commit_msg>Add a minimum value to fibonacci example<commit_after>
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer, min=1)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
Add a minimum value to fibonacci examplefrom girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer, min=1)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
<commit_before>from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
<commit_msg>Add a minimum value to fibonacci example<commit_after>from girder_worker.app import app
@app.task
@app.argument('n', app.types.Integer, min=1)
def fibonacci(n):
"""Compute the nth fibonacci number recursively."""
if n == 1 or n == 2:
return 1
return fibonacci(n-1) + fibonacci(n-2)
|
06dc49becd393e07086e368b26ab1aea3a9bc149
|
pyelasticsearch/tests/__init__.py
|
pyelasticsearch/tests/__init__.py
|
"""
Unit tests for pyelasticsearch
These require an elasticsearch server running on the default port
(localhost:9200).
"""
import unittest
from nose.tools import eq_
# Test that __all__ is sufficient:
from pyelasticsearch import *
class ElasticSearchTestCase(unittest.TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
"""
Unit tests for pyelasticsearch
These require a local elasticsearch server running on the default port
(localhost:9200).
"""
from time import sleep
from unittest import TestCase
from nose import SkipTest
from nose.tools import eq_
from six.moves import xrange
# Test that __all__ is sufficient:
from pyelasticsearch import *
def setUp():
"""When loading the test package, wait for ES to come up."""
for _ in xrange(200):
try:
ElasticSearch().health(wait_for_status='yellow')
return
except ConnectionError:
sleep(.1)
raise SkipTest('Could not connect to the ES server.')
class ElasticSearchTestCase(TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
Add a wait to see if we can get Travis passing again.
|
Add a wait to see if we can get Travis passing again.
|
Python
|
bsd-3-clause
|
erikrose/pyelasticsearch
|
"""
Unit tests for pyelasticsearch
These require an elasticsearch server running on the default port
(localhost:9200).
"""
import unittest
from nose.tools import eq_
# Test that __all__ is sufficient:
from pyelasticsearch import *
class ElasticSearchTestCase(unittest.TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
Add a wait to see if we can get Travis passing again.
|
"""
Unit tests for pyelasticsearch
These require a local elasticsearch server running on the default port
(localhost:9200).
"""
from time import sleep
from unittest import TestCase
from nose import SkipTest
from nose.tools import eq_
from six.moves import xrange
# Test that __all__ is sufficient:
from pyelasticsearch import *
def setUp():
"""When loading the test package, wait for ES to come up."""
for _ in xrange(200):
try:
ElasticSearch().health(wait_for_status='yellow')
return
except ConnectionError:
sleep(.1)
raise SkipTest('Could not connect to the ES server.')
class ElasticSearchTestCase(TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
<commit_before>"""
Unit tests for pyelasticsearch
These require an elasticsearch server running on the default port
(localhost:9200).
"""
import unittest
from nose.tools import eq_
# Test that __all__ is sufficient:
from pyelasticsearch import *
class ElasticSearchTestCase(unittest.TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
<commit_msg>Add a wait to see if we can get Travis passing again.<commit_after>
|
"""
Unit tests for pyelasticsearch
These require a local elasticsearch server running on the default port
(localhost:9200).
"""
from time import sleep
from unittest import TestCase
from nose import SkipTest
from nose.tools import eq_
from six.moves import xrange
# Test that __all__ is sufficient:
from pyelasticsearch import *
def setUp():
"""When loading the test package, wait for ES to come up."""
for _ in xrange(200):
try:
ElasticSearch().health(wait_for_status='yellow')
return
except ConnectionError:
sleep(.1)
raise SkipTest('Could not connect to the ES server.')
class ElasticSearchTestCase(TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
"""
Unit tests for pyelasticsearch
These require an elasticsearch server running on the default port
(localhost:9200).
"""
import unittest
from nose.tools import eq_
# Test that __all__ is sufficient:
from pyelasticsearch import *
class ElasticSearchTestCase(unittest.TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
Add a wait to see if we can get Travis passing again."""
Unit tests for pyelasticsearch
These require a local elasticsearch server running on the default port
(localhost:9200).
"""
from time import sleep
from unittest import TestCase
from nose import SkipTest
from nose.tools import eq_
from six.moves import xrange
# Test that __all__ is sufficient:
from pyelasticsearch import *
def setUp():
"""When loading the test package, wait for ES to come up."""
for _ in xrange(200):
try:
ElasticSearch().health(wait_for_status='yellow')
return
except ConnectionError:
sleep(.1)
raise SkipTest('Could not connect to the ES server.')
class ElasticSearchTestCase(TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
<commit_before>"""
Unit tests for pyelasticsearch
These require an elasticsearch server running on the default port
(localhost:9200).
"""
import unittest
from nose.tools import eq_
# Test that __all__ is sufficient:
from pyelasticsearch import *
class ElasticSearchTestCase(unittest.TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
<commit_msg>Add a wait to see if we can get Travis passing again.<commit_after>"""
Unit tests for pyelasticsearch
These require a local elasticsearch server running on the default port
(localhost:9200).
"""
from time import sleep
from unittest import TestCase
from nose import SkipTest
from nose.tools import eq_
from six.moves import xrange
# Test that __all__ is sufficient:
from pyelasticsearch import *
def setUp():
"""When loading the test package, wait for ES to come up."""
for _ in xrange(200):
try:
ElasticSearch().health(wait_for_status='yellow')
return
except ConnectionError:
sleep(.1)
raise SkipTest('Could not connect to the ES server.')
class ElasticSearchTestCase(TestCase):
def setUp(self):
self.conn = ElasticSearch()
def tearDown(self):
try:
self.conn.delete_index('test-index')
except Exception:
pass
def assert_result_contains(self, result, expected):
for (key, value) in expected.items():
eq_(value, result[key])
return True
|
d716dba6e61f4f7fcb2962dff06fc0d022bd04af
|
registration/__init__.py
|
registration/__init__.py
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
Add utility function for retrieving the active registration backend.
|
Add utility function for retrieving the active registration backend.
|
Python
|
bsd-3-clause
|
austinhappel/django-registration,danielsamuels/django-registration,Troyhy/django-registration,gone/django-registration,ubernostrum/django-registration,sandipagr/django-registration,liberation/django-registration,akvo/django-registration,artursmet/django-registration,Troyhy/django-registration,hacklabr/django-registration,sandipagr/django-registration,awakeup/django-registration,dirtycoder/django-registration,jnns/django-registration,mypebble/djregs,tdruez/django-registration,liberation/django-registration,futurecolors/django-registration,artursmet/django-registration,gone/django-registration,austinhappel/django-registration,euanlau/django-registration,kennydude/djregs,spurfly/django-registration,hacklabr/django-registration,spurfly/django-registration,akvo/django-registration,futurecolors/django-registration,myimages/django-registration,euanlau/django-registration
|
Add utility function for retrieving the active registration backend.
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
<commit_before><commit_msg>Add utility function for retrieving the active registration backend.<commit_after>
|
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
Add utility function for retrieving the active registration backend.from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
<commit_before><commit_msg>Add utility function for retrieving the active registration backend.<commit_after>from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
|
|
fbb0abe3bdb62ec64bfdd03f9b45ded4def9613a
|
wsgi_intercept/test/test_mechanize.py
|
wsgi_intercept/test/test_mechanize.py
|
from nose.tools import with_setup, raises
from urllib2 import URLError
from wsgi_intercept.mechanize_intercept import Browser
import wsgi_intercept
from wsgi_intercept import test_wsgi_app
from mechanize import Browser as MechanizeBrowser
###
_saved_debuglevel = None
def add_intercept():
# _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 80, test_wsgi_app.create_fn)
def add_https_intercept():
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 443, test_wsgi_app.create_fn)
def remove_intercept():
wsgi_intercept.remove_wsgi_intercept('some_hopefully_nonexistant_domain', 80)
# wsgi_intercept.debuglevel = _saved_debuglevel
@with_setup(add_intercept, remove_intercept)
def test_intercepted():
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
assert test_wsgi_app.success()
@with_setup(add_intercept)
@raises(URLError)
def test_intercept_removed():
remove_intercept()
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
@with_setup(add_https_intercept, remove_intercept)
def test_https_intercept():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain:443/')
assert test_wsgi_app.success()
@with_setup(add_intercept, remove_intercept)
def test_https_intercept_default_port():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain/')
assert test_wsgi_app.success()
|
from urllib2 import URLError
from wsgi_intercept import testing
from wsgi_intercept.testing import unittest
from wsgi_intercept.test import base
try:
import mechanize
has_mechanize = True
except ImportError:
has_mechanize = False
_skip_message = "mechanize is not installed"
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpTestCase(base.BaseTestCase):
port = 80
def make_one(self, *args):
from mechanize import Browser
return Browser(*args)
def test_intercepted(self):
b = self.make_one()
b.open(self.url)
self.assertTrue(testing.success())
def test_intercept_removed():
remove_intercept()
b = self.make_one()
with self.assertRaises(URLError):
b.open(self.url)
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpsTestCase(MechanizeHttpTestCase):
port = 443
|
Use unittest in the mechanize related tests.
|
Use unittest in the mechanize related tests.
|
Python
|
mit
|
pumazi/wsgi_intercept2
|
from nose.tools import with_setup, raises
from urllib2 import URLError
from wsgi_intercept.mechanize_intercept import Browser
import wsgi_intercept
from wsgi_intercept import test_wsgi_app
from mechanize import Browser as MechanizeBrowser
###
_saved_debuglevel = None
def add_intercept():
# _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 80, test_wsgi_app.create_fn)
def add_https_intercept():
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 443, test_wsgi_app.create_fn)
def remove_intercept():
wsgi_intercept.remove_wsgi_intercept('some_hopefully_nonexistant_domain', 80)
# wsgi_intercept.debuglevel = _saved_debuglevel
@with_setup(add_intercept, remove_intercept)
def test_intercepted():
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
assert test_wsgi_app.success()
@with_setup(add_intercept)
@raises(URLError)
def test_intercept_removed():
remove_intercept()
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
@with_setup(add_https_intercept, remove_intercept)
def test_https_intercept():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain:443/')
assert test_wsgi_app.success()
@with_setup(add_intercept, remove_intercept)
def test_https_intercept_default_port():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain/')
assert test_wsgi_app.success()Use unittest in the mechanize related tests.
|
from urllib2 import URLError
from wsgi_intercept import testing
from wsgi_intercept.testing import unittest
from wsgi_intercept.test import base
try:
import mechanize
has_mechanize = True
except ImportError:
has_mechanize = False
_skip_message = "mechanize is not installed"
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpTestCase(base.BaseTestCase):
port = 80
def make_one(self, *args):
from mechanize import Browser
return Browser(*args)
def test_intercepted(self):
b = self.make_one()
b.open(self.url)
self.assertTrue(testing.success())
def test_intercept_removed():
remove_intercept()
b = self.make_one()
with self.assertRaises(URLError):
b.open(self.url)
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpsTestCase(MechanizeHttpTestCase):
port = 443
|
<commit_before>
from nose.tools import with_setup, raises
from urllib2 import URLError
from wsgi_intercept.mechanize_intercept import Browser
import wsgi_intercept
from wsgi_intercept import test_wsgi_app
from mechanize import Browser as MechanizeBrowser
###
_saved_debuglevel = None
def add_intercept():
# _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 80, test_wsgi_app.create_fn)
def add_https_intercept():
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 443, test_wsgi_app.create_fn)
def remove_intercept():
wsgi_intercept.remove_wsgi_intercept('some_hopefully_nonexistant_domain', 80)
# wsgi_intercept.debuglevel = _saved_debuglevel
@with_setup(add_intercept, remove_intercept)
def test_intercepted():
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
assert test_wsgi_app.success()
@with_setup(add_intercept)
@raises(URLError)
def test_intercept_removed():
remove_intercept()
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
@with_setup(add_https_intercept, remove_intercept)
def test_https_intercept():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain:443/')
assert test_wsgi_app.success()
@with_setup(add_intercept, remove_intercept)
def test_https_intercept_default_port():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain/')
assert test_wsgi_app.success()<commit_msg>Use unittest in the mechanize related tests.<commit_after>
|
from urllib2 import URLError
from wsgi_intercept import testing
from wsgi_intercept.testing import unittest
from wsgi_intercept.test import base
try:
import mechanize
has_mechanize = True
except ImportError:
has_mechanize = False
_skip_message = "mechanize is not installed"
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpTestCase(base.BaseTestCase):
    """Exercise wsgi_intercept through a mechanize Browser over plain HTTP."""

    # Port the intercept is installed on (consumed by base.BaseTestCase).
    port = 80

    def make_one(self, *args):
        # Import lazily so this module can still be imported when mechanize
        # is absent; the skipUnless decorator prevents execution in that case.
        from mechanize import Browser
        return Browser(*args)

    def test_intercepted(self):
        b = self.make_one()
        b.open(self.url)
        self.assertTrue(testing.success())

    def test_intercept_removed(self):
        # BUG FIX: ``self`` was missing from the signature, so unittest
        # could never invoke this as an instance method.
        # NOTE(review): ``remove_intercept`` is not defined in this module;
        # presumably the teardown hook should come from
        # wsgi_intercept.test.base -- confirm and adjust the call.
        remove_intercept()
        b = self.make_one()
        with self.assertRaises(URLError):
            b.open(self.url)
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpsTestCase(MechanizeHttpTestCase):
    """Re-run the HTTP test cases against the HTTPS intercept port."""
    port = 443
|
from nose.tools import with_setup, raises
from urllib2 import URLError
from wsgi_intercept.mechanize_intercept import Browser
import wsgi_intercept
from wsgi_intercept import test_wsgi_app
from mechanize import Browser as MechanizeBrowser
###
_saved_debuglevel = None
def add_intercept():
# _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 80, test_wsgi_app.create_fn)
def add_https_intercept():
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 443, test_wsgi_app.create_fn)
def remove_intercept():
wsgi_intercept.remove_wsgi_intercept('some_hopefully_nonexistant_domain', 80)
# wsgi_intercept.debuglevel = _saved_debuglevel
@with_setup(add_intercept, remove_intercept)
def test_intercepted():
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
assert test_wsgi_app.success()
@with_setup(add_intercept)
@raises(URLError)
def test_intercept_removed():
remove_intercept()
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
@with_setup(add_https_intercept, remove_intercept)
def test_https_intercept():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain:443/')
assert test_wsgi_app.success()
@with_setup(add_intercept, remove_intercept)
def test_https_intercept_default_port():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain/')
assert test_wsgi_app.success()Use unittest in the mechanize related tests.from urllib2 import URLError
from wsgi_intercept import testing
from wsgi_intercept.testing import unittest
from wsgi_intercept.test import base
try:
import mechanize
has_mechanize = True
except ImportError:
has_mechanize = False
_skip_message = "mechanize is not installed"
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpTestCase(base.BaseTestCase):
port = 80
def make_one(self, *args):
from mechanize import Browser
return Browser(*args)
def test_intercepted(self):
b = self.make_one()
b.open(self.url)
self.assertTrue(testing.success())
def test_intercept_removed():
remove_intercept()
b = self.make_one()
with self.assertRaises(URLError):
b.open(self.url)
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpsTestCase(MechanizeHttpTestCase):
port = 443
|
<commit_before>
from nose.tools import with_setup, raises
from urllib2 import URLError
from wsgi_intercept.mechanize_intercept import Browser
import wsgi_intercept
from wsgi_intercept import test_wsgi_app
from mechanize import Browser as MechanizeBrowser
###
_saved_debuglevel = None
def add_intercept():
# _saved_debuglevel, wsgi_intercept.debuglevel = wsgi_intercept.debuglevel, 1
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 80, test_wsgi_app.create_fn)
def add_https_intercept():
wsgi_intercept.add_wsgi_intercept('some_hopefully_nonexistant_domain', 443, test_wsgi_app.create_fn)
def remove_intercept():
wsgi_intercept.remove_wsgi_intercept('some_hopefully_nonexistant_domain', 80)
# wsgi_intercept.debuglevel = _saved_debuglevel
@with_setup(add_intercept, remove_intercept)
def test_intercepted():
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
assert test_wsgi_app.success()
@with_setup(add_intercept)
@raises(URLError)
def test_intercept_removed():
remove_intercept()
b = Browser()
b.open('http://some_hopefully_nonexistant_domain:80/')
@with_setup(add_https_intercept, remove_intercept)
def test_https_intercept():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain:443/')
assert test_wsgi_app.success()
@with_setup(add_intercept, remove_intercept)
def test_https_intercept_default_port():
b = Browser()
b.open('https://some_hopefully_nonexistant_domain/')
assert test_wsgi_app.success()<commit_msg>Use unittest in the mechanize related tests.<commit_after>from urllib2 import URLError
from wsgi_intercept import testing
from wsgi_intercept.testing import unittest
from wsgi_intercept.test import base
try:
import mechanize
has_mechanize = True
except ImportError:
has_mechanize = False
_skip_message = "mechanize is not installed"
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpTestCase(base.BaseTestCase):
port = 80
def make_one(self, *args):
from mechanize import Browser
return Browser(*args)
def test_intercepted(self):
b = self.make_one()
b.open(self.url)
self.assertTrue(testing.success())
def test_intercept_removed():
remove_intercept()
b = self.make_one()
with self.assertRaises(URLError):
b.open(self.url)
@unittest.skipUnless(has_mechanize, _skip_message)
class MechanizeHttpsTestCase(MechanizeHttpTestCase):
port = 443
|
c0e87b32f9f3c5e306fb553990754ff4aae9dc3c
|
hub/urls.py
|
hub/urls.py
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/$', views.api.StatusApiView.as_view()),
]
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
# URL routing table.  The trailing slash on the API routes is optional
# ("/?$") so clients may use either form without a redirect; the admin
# URLs keep Django's canonical redirect-to-slash behaviour.
urlpatterns = [
    url(r'^admin/', include(admin.site.urls)),
    url(r'^api/temperatures/?$', views.api.TemperatureApiView.as_view()),
    url(r'^api/status/?$', views.api.StatusApiView.as_view()),
]
|
Make trailing slash optional for API URLs.
|
Make trailing slash optional for API URLs.
Admin website URLs will still redirect to canonical version with trailing slash.
|
Python
|
mit
|
kblum/sensor-hub
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/$', views.api.StatusApiView.as_view()),
]
Make trailing slash optional for API URLs.
Admin website URLs will still redirect to canonical version with trailing slash.
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/?$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/?$', views.api.StatusApiView.as_view()),
]
|
<commit_before>"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/$', views.api.StatusApiView.as_view()),
]
<commit_msg>Make trailing slash optional for API URLs.
Admin website URLs will still redirect to canonical version with trailing slash.<commit_after>
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/?$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/?$', views.api.StatusApiView.as_view()),
]
|
"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/$', views.api.StatusApiView.as_view()),
]
Make trailing slash optional for API URLs.
Admin website URLs will still redirect to canonical version with trailing slash."""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/?$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/?$', views.api.StatusApiView.as_view()),
]
|
<commit_before>"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/$', views.api.StatusApiView.as_view()),
]
<commit_msg>Make trailing slash optional for API URLs.
Admin website URLs will still redirect to canonical version with trailing slash.<commit_after>"""hub URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from .sensorhub import views
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^api/temperatures/?$', views.api.TemperatureApiView.as_view()),
url(r'^api/status/?$', views.api.StatusApiView.as_view()),
]
|
0ea3b4eca6cbd70200ffc3a844cedf1b4a427a42
|
src/doc/help2man_preformat.py
|
src/doc/help2man_preformat.py
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is avaliable in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.

from __future__ import print_function
from __future__ import absolute_import
import sys

# Normalize input: strip trailing whitespace and expand tabs to 8 spaces so
# txt2man's column-sensitive parsing stays well formed.
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]

print('TITLE')
# The first line of "--help" output is the usage/summary line.
# BUG FIX: guard against empty stdin, which previously raised IndexError.
print(lines[0] if lines else '')
print()
print('SYNOPSIS')
# Echo everything up to the first option line (or an "Options" header) and
# remember where the option section starts for the OPTIONS block below.
# BUG FIX: optStart is pre-initialized so the script no longer raises
# NameError when the help text contains no option section at all.
optStart = len(lines)
for i, line in enumerate(lines[2:]):
    if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
        optStart = i+2
        break
    print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is available in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
    if not line.startswith(' '):
        # Section/heading line: insert a blank line before it.
        print()
        print(line)
    elif not line.lstrip().startswith('-'):
        # Continuation text under an option: flush left for txt2man.
        print(line.lstrip())
    else:
        print(line)
print()
Fix typo in man pages
|
Fix typo in man pages
|
Python
|
bsd-3-clause
|
OpenImageIO/oiio,OpenImageIO/oiio,OpenImageIO/oiio,lgritz/oiio,OpenImageIO/oiio,lgritz/oiio,lgritz/oiio,lgritz/oiio
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is avaliable in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
Fix typo in man pages
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is available in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
|
<commit_before>#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is avaliable in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
<commit_msg>Fix typo in man pages<commit_after>
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is available in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
|
#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is avaliable in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
Fix typo in man pages#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is available in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
|
<commit_before>#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is avaliable in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
<commit_msg>Fix typo in man pages<commit_after>#!/usr/bin/python
# Format the output from various oiio command line "$tool --help" invocations,
# and munge such that txt2man generates a simple man page with not-too-horrible
# formatting.
from __future__ import print_function
from __future__ import absolute_import
import sys
lines = [l.rstrip().replace('\t', ' '*8) for l in sys.stdin.readlines()]
print('TITLE')
print(lines[0])
print()
print('SYNOPSIS')
for i,line in enumerate(lines[2:]):
if line.lstrip().startswith('-') or line.lstrip().startswith('Options'):
optStart = i+2
break
print(line)
print('''DESCRIPTION
This program is part of the OpenImageIO (http://www.openimageio.org) tool suite.
Detailed documentation is available in pdf format with the OpenImageIO
distribution.
''')
print('OPTIONS')
for line in lines[optStart:]:
if not line.startswith(' '):
print()
print(line)
elif not line.lstrip().startswith('-'):
print(line.lstrip())
else:
print(line)
print()
|
697c590bf60c261280e55f8580b33423dbe800c6
|
splinter/driver/webdriver/firefox.py
|
splinter/driver/webdriver/firefox.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
    """Splinter driver backed by Selenium's Firefox WebDriver."""
    def __init__(self, profile=None, extensions=None):
        # Keep a reference to the unpatched Popen so it can be restored
        # after driver startup (patch helpers presumably come from
        # BaseWebDriver -- confirm).
        self.old_popen = subprocess.Popen
        firefox_profile = FirefoxProfile(profile)
        firefox_profile.set_preference('extensions.logging.enabled', False)
        # Keep IPv6 DNS enabled (disableIPv6=False): with the default of
        # True, Firefox 6 failed to open pages at all.
        firefox_profile.set_preference('network.dns.disableIPv6', False)
        if extensions:
            for extension in extensions:
                firefox_profile.add_extension(extension)
        # Patch subprocess only around browser launch, then restore it.
        self._patch_subprocess()
        self.driver = Firefox(firefox_profile)
        self._unpatch_subprocess()
        self.element_class = WebDriverElement
        self._cookie_manager = CookieManager(self.driver)
        super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
    """Element wrapper for the Firefox driver.

    The mouse-interaction hooks below are stubbed out because the Firefox
    backend cannot perform them; each one raises NotImplementedError.
    """
    def mouseover(self):
        """Not supported under Firefox."""
        raise NotImplementedError("Firefox doesn't support mouse over")
    def mouseout(self):
        """Not supported under Firefox."""
        raise NotImplementedError("Firefox doesn't support mouseout")
    def double_click(self):
        """Not supported under Firefox."""
        raise NotImplementedError("Firefox doesn't support doubleclick")
|
Fix error on Firefox 6 where pages are not open if this preference is True (default).
|
Fix error on Firefox 6 where pages are not open if this preference is True (default).
|
Python
|
bsd-3-clause
|
bmcculley/splinter,cobrateam/splinter,bmcculley/splinter,nikolas/splinter,drptbl/splinter,objarni/splinter,nikolas/splinter,cobrateam/splinter,drptbl/splinter,underdogio/splinter,underdogio/splinter,bubenkoff/splinter,lrowe/splinter,bubenkoff/splinter,lrowe/splinter,objarni/splinter,gjvis/splinter,bmcculley/splinter,underdogio/splinter,objarni/splinter,lrowe/splinter,nikolas/splinter,cobrateam/splinter,gjvis/splinter,myself659/splinter,myself659/splinter,myself659/splinter,gjvis/splinter,drptbl/splinter
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
Fix error on Firefox 6 where pages are not open if this preference is True (default).
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
firefox_profile.set_preference('network.dns.disableIPv6', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
<commit_msg>Fix error on Firefox 6 where pages are not open if this preference is True (default).<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
firefox_profile.set_preference('network.dns.disableIPv6', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
Fix error on Firefox 6 where pages are not open if this preference is True (default).#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
firefox_profile.set_preference('network.dns.disableIPv6', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
<commit_msg>Fix error on Firefox 6 where pages are not open if this preference is True (default).<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import BaseWebDriver, WebDriverElement as BaseWebDriverElement
from splinter.driver.webdriver.cookie_manager import CookieManager
class WebDriver(BaseWebDriver):
def __init__(self, profile=None, extensions=None):
self.old_popen = subprocess.Popen
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference('extensions.logging.enabled', False)
firefox_profile.set_preference('network.dns.disableIPv6', False)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
self._patch_subprocess()
self.driver = Firefox(firefox_profile)
self._unpatch_subprocess()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__()
class WebDriverElement(BaseWebDriverElement):
def mouseover(self):
"""
Firefox doesn't support mouseover.
"""
raise NotImplementedError("Firefox doesn't support mouse over")
def mouseout(self):
"""
Firefox doesn't support mouseout.
"""
raise NotImplementedError("Firefox doesn't support mouseout")
def double_click(self):
"""
Firefox doesn't support doubleclick.
"""
raise NotImplementedError("Firefox doesn't support doubleclick")
|
5009a88b16e8776e87a338796baa6d8e60c99ee7
|
police_api/resource.py
|
police_api/resource.py
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data[field]))
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data.get(field)))
|
Fix IndexError bug in _hydrate
|
Fix IndexError bug in _hydrate
|
Python
|
mit
|
rkhleics/police-api-client-python
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data[field]))
Fix IndexError bug in _hydrate
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data.get(field)))
|
<commit_before>import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data[field]))
<commit_msg>Fix IndexError bug in _hydrate<commit_after>
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data.get(field)))
|
import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data[field]))
Fix IndexError bug in _hydrateimport requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data.get(field)))
|
<commit_before>import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data[field]))
<commit_msg>Fix IndexError bug in _hydrate<commit_after>import requests
API_URL = 'http://data.police.uk/api/'
class APIError(Exception):
pass
def api_request(method):
r = requests.get(API_URL + method)
if r.status_code != 200:
raise APIError(r.status_code)
return r.json()
class Resource(object):
_requested = False
api_method = None
fields = []
def __getattr__(self, attr):
if not self._requested and attr in self.fields:
self._make_api_request()
return self.__getattribute__(attr)
def _make_api_request(self):
self._response_data = api_request(self._get_api_method())
self._hydrate()
self._requested = True
def _get_api_method(self):
if self.api_method is None:
raise RuntimeError('You must set the api_method attribute')
return self.api_method
def _hydrate(self):
for field in self.fields:
hydrate_field = getattr(self, '_hydrate_%s' % field, lambda x: x)
setattr(self, field, hydrate_field(self._response_data.get(field)))
|
f7dea9dbfc5714be08e6cf9f146ae9eca21929c3
|
test/on_yubikey/cli_piv/test_misc.py
|
test/on_yubikey/cli_piv/test_misc.py
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEquals('test data\n', output)
return [Misc]
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
Fix test warning about wrong assert function
|
Fix test warning about wrong assert function
|
Python
|
bsd-2-clause
|
Yubico/yubikey-manager,Yubico/yubikey-manager
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEquals('test data\n', output)
return [Misc]
Fix test warning about wrong assert function
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
<commit_before>import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEquals('test data\n', output)
return [Misc]
<commit_msg>Fix test warning about wrong assert function<commit_after>
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEquals('test data\n', output)
return [Misc]
Fix test warning about wrong assert functionimport unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
<commit_before>import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEquals('test data\n', output)
return [Misc]
<commit_msg>Fix test warning about wrong assert function<commit_after>import unittest
from ..framework import cli_test_suite
from .util import DEFAULT_MANAGEMENT_KEY
@cli_test_suite
def additional_tests(ykman_cli):
class Misc(unittest.TestCase):
def setUp(self):
ykman_cli('piv', 'reset', '-f')
def test_info(self):
output = ykman_cli('piv', 'info')
self.assertIn('PIV version:', output)
def test_reset(self):
output = ykman_cli('piv', 'reset', '-f')
self.assertIn('Success!', output)
def test_write_read_object(self):
ykman_cli(
'piv', 'write-object',
'-m', DEFAULT_MANAGEMENT_KEY, '0x5f0001',
'-', input='test data')
output = ykman_cli('piv', 'read-object', '0x5f0001')
self.assertEqual('test data\n', output)
return [Misc]
|
0e3952c0648375810b479d093e970d072db0fe6d
|
app/resources/forms.py
|
app/resources/forms.py
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
# Google Place Autocomplete example divs named for Google address schema
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
Comment about form field names for Google Autocomplete
|
Comment about form field names for Google Autocomplete
|
Python
|
mit
|
hack4impact/women-veterans-rock,hack4impact/women-veterans-rock,hack4impact/women-veterans-rock
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
Comment about form field names for Google Autocomplete
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
# Google Place Autocomplete example divs named for Google address schema
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
<commit_before>from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
<commit_msg>Comment about form field names for Google Autocomplete<commit_after>
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
# Google Place Autocomplete example divs named for Google address schema
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
Comment about form field names for Google Autocompletefrom flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
# Google Place Autocomplete example divs named for Google address schema
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
<commit_before>from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
<commit_msg>Comment about form field names for Google Autocomplete<commit_after>from flask.ext.wtf import Form
from wtforms.fields import (
StringField,
IntegerField,
SubmitField
)
from wtforms.validators import InputRequired, Length
class ResourceForm(Form):
autocomplete = StringField('Enter the address')
name = StringField('Name', validators=[
InputRequired(),
Length(1, 64)
])
description = StringField('Description', validators=[
InputRequired(),
])
website = StringField('Website')
street_number = IntegerField('Street Number', validators=[
InputRequired()
])
# Google Place Autocomplete example divs named for Google address schema
route = StringField('Street Address', validators=[
InputRequired()
])
locality = StringField('City', validators=[
InputRequired()
])
administrative_area_level_1 = StringField('State', validators=[
InputRequired()
])
postal_code = StringField('ZIP Code', validators=[
InputRequired(),
Length(5)
])
submit = SubmitField('Add Resource')
|
992b3302c4cb690e86436c54c43d0bb2aa406b0d
|
scrapi/harvesters/hacettepe_U_DIM.py
|
scrapi/harvesters/hacettepe_U_DIM.py
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
    """OAI-PMH harvester for Hacettepe University's DSpace repository (SHARE)."""

    short_name = 'hacettepe'  # registry key for this harvester
    long_name = 'DSpace on LibLiveCD'
    url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
    base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
    # Extra OAI record properties to capture beyond the harvester defaults.
    property_list = ['date', 'identifier', 'type', 'rights']
    # Date ranges sent to the endpoint include the time portion.
    timezone_granularity = True
|
Change shortname and class name
|
Change shortname and class name
|
Python
|
apache-2.0
|
alexgarciac/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,ostwald/scrapi,CenterForOpenScience/scrapi,erinspace/scrapi,jeffreyliu3230/scrapi,mehanig/scrapi,erinspace/scrapi,felliott/scrapi,felliott/scrapi,fabianvf/scrapi
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
Change shortname and class name
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
<commit_before>'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
<commit_msg>Change shortname and class name<commit_after>
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
Change shortname and class name'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
<commit_before>'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class Hacettepe_u_dimHarvester(OAIHarvester):
short_name = 'hacettepe_U_DIM'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
<commit_msg>Change shortname and class name<commit_after>'''
Harvester for the DSpace on LibLiveCD for the SHARE project
Example API call: http://bbytezarsivi.hacettepe.edu.tr/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
from scrapi.base import OAIHarvester
class HacettepeHarvester(OAIHarvester):
short_name = 'hacettepe'
long_name = 'DSpace on LibLiveCD'
url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
base_url = 'http://bbytezarsivi.hacettepe.edu.tr/oai/request'
property_list = ['date', 'identifier', 'type', 'rights']
timezone_granularity = True
|
0da1b3984f1e518dffc55ac7d3c2d53ef4bf55cb
|
matchzoo/utils/util_preprocessor.py
|
matchzoo/utils/util_preprocessor.py
|
"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self._context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
|
"""Utils for preprocessors."""
def validate_context(func):
    """Decorator: require a fitted context before running a transform method.

    Wraps a preprocessor method and raises ``ValueError`` when
    ``self.context`` is empty/falsy, i.e. ``fit`` has not been called yet.

    Args:
        func: the bound-style method being guarded (first argument is self).

    Returns:
        The wrapped method, with the original's metadata preserved.
    """
    import functools  # local import keeps the module's top level untouched

    @functools.wraps(func)  # preserve the wrapped method's name/docstring
    def transform_wrapper(self, *args, **kwargs):
        if not self.context:
            raise ValueError(
                'Please fit parameters before transformation.')
        return func(self, *args, **kwargs)

    return transform_wrapper
|
Update context checker by using accessor instead
|
Update context checker by using accessor instead
|
Python
|
apache-2.0
|
faneshion/MatchZoo,faneshion/MatchZoo
|
"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self._context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
Update context checker by using accessor instead
|
"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self.context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
|
<commit_before>"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self._context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
<commit_msg>Update context checker by using accessor instead<commit_after>
|
"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self.context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
|
"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self._context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
Update context checker by using accessor instead"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self.context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
|
<commit_before>"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self._context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
<commit_msg>Update context checker by using accessor instead<commit_after>"""Utils for preprocessors."""
def validate_context(func):
"""Validate context in the preprocessor."""
def transform_wrapper(self, *args, **kwargs):
if not self.context:
raise ValueError(
'Please fit parameters before transformation.')
return func(self, *args, **kwargs)
return transform_wrapper
|
312c0d463940257cb1f777d3720778550b5bdb2d
|
bluebottle/organizations/serializers.py
|
bluebottle/organizations/serializers.py
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
    """Serializer exposing the public fields of an Organization."""

    class Meta:
        model = Organization
        fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
                  'city', 'state', 'country', 'postal_code', 'phone_number',
                  'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
    """Serializer for the manage-organization endpoint (per class name).

    Overrides a few model fields to relax requirements: slug may be
    omitted, name must be present but may be blank, website and email
    are optional.
    """

    slug = serializers.SlugField(required=False, allow_null=True)
    # required=True with allow_blank=True: the key must be sent, '' accepted.
    name = serializers.CharField(required=True, allow_blank=True)
    website = URLField(required=False, allow_blank=True)
    email = serializers.EmailField(required=False, allow_blank=True)

    class Meta:
        model = Organization
        # Public fields plus management-only extras.
        fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
                                                       'created', 'updated')
|
Revert "Make the name of an organization required"
|
Revert "Make the name of an organization required"
This reverts commit 02140561a29a2b7fe50f7bf2402da566e60be641.
|
Python
|
bsd-3-clause
|
jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
Revert "Make the name of an organization required"
This reverts commit 02140561a29a2b7fe50f7bf2402da566e60be641.
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
<commit_before>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
<commit_msg>Revert "Make the name of an organization required"
This reverts commit 02140561a29a2b7fe50f7bf2402da566e60be641.<commit_after>
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
Revert "Make the name of an organization required"
This reverts commit 02140561a29a2b7fe50f7bf2402da566e60be641.from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
<commit_before>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
<commit_msg>Revert "Make the name of an organization required"
This reverts commit 02140561a29a2b7fe50f7bf2402da566e60be641.<commit_after>from rest_framework import serializers
from bluebottle.organizations.models import Organization
from bluebottle.utils.serializers import URLField
class OrganizationSerializer(serializers.ModelSerializer):
class Meta:
model = Organization
fields = ('id', 'name', 'slug', 'address_line1', 'address_line2',
'city', 'state', 'country', 'postal_code', 'phone_number',
'website', 'email')
class ManageOrganizationSerializer(serializers.ModelSerializer):
slug = serializers.SlugField(required=False, allow_null=True)
name = serializers.CharField(required=True, allow_blank=True)
website = URLField(required=False, allow_blank=True)
email = serializers.EmailField(required=False, allow_blank=True)
class Meta:
model = Organization
fields = OrganizationSerializer.Meta.fields + ('partner_organizations',
'created', 'updated')
|
80fa2f3c47ddc845d4dc9e549df38f68267873d6
|
corehq/ex-submodules/pillow_retry/tasks.py
|
corehq/ex-submodules/pillow_retry/tasks.py
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
    run_every=crontab(minute="*/15"),
    queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
    """Every 15 minutes, push per-pillow error-queue sizes to datadog.

    The tag list below reads row['error_type'], so the grouping must
    include 'error_type'; grouping on 'pillow' alone would raise
    KeyError on the first row.
    """
    # Group by (pillow, error_type) so each row carries both tag values.
    # NOTE(review): assumes PillowError has an 'error_type' field -- confirm
    # against the model definition.
    data = PillowError.objects.values('pillow', 'error_type').annotate(num_errors=Count('id'))
    for row in data:
        datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
            'pillow_name:%s' % row['pillow'],
            'host:celery',
            'group:celery',
            'error_type:%s' % row['error_type']
        ])
|
Send error-type info to pillow error DD metrics
|
Send error-type info to pillow error DD metrics
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
Send error-type info to pillow error DD metrics
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
<commit_before>from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
<commit_msg>Send error-type info to pillow error DD metrics<commit_after>
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
Send error-type info to pillow error DD metricsfrom celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
<commit_before>from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery'
])
<commit_msg>Send error-type info to pillow error DD metrics<commit_after>from celery.schedules import crontab
from celery.task import periodic_task
from django.conf import settings
from django.db.models import Count
from corehq.util.datadog.gauges import datadog_gauge
from pillow_retry.models import PillowError
@periodic_task(
run_every=crontab(minute="*/15"),
queue=settings.CELERY_PERIODIC_QUEUE,
)
def record_pillow_error_queue_size():
data = PillowError.objects.values('pillow').annotate(num_errors=Count('id'))
for row in data:
datadog_gauge('commcare.pillowtop.error_queue', row['num_errors'], tags=[
'pillow_name:%s' % row['pillow'],
'host:celery',
'group:celery',
'error_type:%s' % row['error_type']
])
|
3c64002217795e5d8d3eebb7b06f8ad72f342564
|
thinglang/parser/tokens/functions.py
|
thinglang/parser/tokens/functions.py
|
from thinglang.lexer.symbols.base import LexicalAccess
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.target, self.arguments = slice
self.value = self
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]
|
from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalClassInitialization
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def evaluate(self, resolver):
return resolver.resolve(self)
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.value = self
if isinstance(slice[0], LexicalClassInitialization):
self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])])
self.arguments = slice[2]
self.constructing_call = True
else:
self.target, self.arguments = slice
self.constructing_call = False
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]
|
Add proper support for constructor calls to MethodCall
|
Add proper support for constructor calls to MethodCall
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
from thinglang.lexer.symbols.base import LexicalAccess
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.target, self.arguments = slice
self.value = self
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]Add proper support for constructor calls to MethodCall
|
from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalClassInitialization
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
    """Dotted access chain (e.g. ``a.b``); keeps the identifiers between dots."""

    def __init__(self, slice):
        super(Access, self).__init__(slice)
        # Drop the LexicalAccess (dot) tokens, keeping only path components.
        self.target = [x for x in slice if not isinstance(x, LexicalAccess)]

    def evaluate(self, resolver):
        # Resolution of the access chain is delegated entirely to the resolver.
        return resolver.resolve(self)

    def describe(self):
        return '.'.join(str(x) for x in self.target)

    def __getitem__(self, item):
        # Index into the path components (self[0] is the root identifier).
        return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
    """A method-invocation token: a plain call or a constructor call."""

    def __init__(self, slice):
        super(MethodCall, self).__init__(slice)
        # A call is itself a value, so it can appear inside expressions.
        self.value = self
        if isinstance(slice[0], LexicalClassInitialization):
            # Constructor call: rewrite as a call to the class's constructor
            # method on the target type (slice[1] is the type identifier).
            self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])])
            self.arguments = slice[2]
            self.constructing_call = True
        else:
            # Plain call: slice is (target access, argument list).
            self.target, self.arguments = slice
            self.constructing_call = False
        if not self.arguments:
            # Normalize "no arguments" to an empty ArgumentList.
            self.arguments = ArgumentList()

    def describe(self):
        return 'target={}, args={}'.format(self.target, self.arguments)

    def replace(self, original, replacement):
        # Replacement descends into the argument list only; target is untouched.
        self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
    """A ``return <value>`` statement token."""

    def __init__(self, slice):
        super().__init__(slice)
        # slice[1] holds the returned expression; slice[0] is presumably
        # the `return` keyword token -- confirm against the lexer.
        self.value = slice[1]
|
<commit_before>from thinglang.lexer.symbols.base import LexicalAccess
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.target, self.arguments = slice
self.value = self
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]<commit_msg>Add proper support for constructor calls to MethodCall<commit_after>
|
from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalClassInitialization
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def evaluate(self, resolver):
return resolver.resolve(self)
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.value = self
if isinstance(slice[0], LexicalClassInitialization):
self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])])
self.arguments = slice[2]
self.constructing_call = True
else:
self.target, self.arguments = slice
self.constructing_call = False
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]
|
from thinglang.lexer.symbols.base import LexicalAccess
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.target, self.arguments = slice
self.value = self
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]Add proper support for constructor calls to MethodCallfrom thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalClassInitialization
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def evaluate(self, resolver):
return resolver.resolve(self)
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.value = self
if isinstance(slice[0], LexicalClassInitialization):
self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])])
self.arguments = slice[2]
self.constructing_call = True
else:
self.target, self.arguments = slice
self.constructing_call = False
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]
|
<commit_before>from thinglang.lexer.symbols.base import LexicalAccess
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.target, self.arguments = slice
self.value = self
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]<commit_msg>Add proper support for constructor calls to MethodCall<commit_after>from thinglang.lexer.symbols.base import LexicalAccess, LexicalIdentifier
from thinglang.lexer.symbols.functions import LexicalClassInitialization
from thinglang.parser.tokens import BaseToken, DefinitionPairToken
from thinglang.parser.tokens.collections import ListInitializationPartial, ListInitialization
from thinglang.utils.type_descriptors import ValueType
class Access(BaseToken):
def __init__(self, slice):
super(Access, self).__init__(slice)
self.target = [x for x in slice if not isinstance(x, LexicalAccess)]
def evaluate(self, resolver):
return resolver.resolve(self)
def describe(self):
return '.'.join(str(x) for x in self.target)
def __getitem__(self, item):
return self.target[item]
class ArgumentListPartial(ListInitializationPartial):
pass
class ArgumentListDecelerationPartial(ArgumentListPartial):
pass
class ArgumentList(ListInitialization):
pass
class MethodCall(BaseToken, ValueType):
def __init__(self, slice):
super(MethodCall, self).__init__(slice)
self.value = self
if isinstance(slice[0], LexicalClassInitialization):
self.target = Access([slice[1], LexicalIdentifier.constructor().contextify(slice[0])])
self.arguments = slice[2]
self.constructing_call = True
else:
self.target, self.arguments = slice
self.constructing_call = False
if not self.arguments:
self.arguments = ArgumentList()
def describe(self):
return 'target={}, args={}'.format(self.target, self.arguments)
def replace(self, original, replacement):
self.arguments.replace(original, replacement)
class ReturnStatement(DefinitionPairToken):
def __init__(self, slice):
super().__init__(slice)
self.value = slice[1]
|
acd84f19d8d8820aecdba62bf4d0c97a2d4bdf34
|
src/source_weather/source_weather.py
|
src/source_weather/source_weather.py
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
def __init__(self, funny_message="Java.OutOfMemoryError"
funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
Access to actual or predicted weather done
|
Access to actual or predicted weather done
|
Python
|
unlicense
|
Aluriak/24hducode2016,Aluriak/24hducode2016
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
def __init__(self, funny_message="Java.OutOfMemoryError"
funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
Access to actual or predicted weather done
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
<commit_before>"""
Definition of a source than add dumb data
"""
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
def __init__(self, funny_message="Java.OutOfMemoryError"
funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
<commit_msg>Access to actual or predicted weather done<commit_after>
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
"""
Definition of a source than add dumb data
"""
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
def __init__(self, funny_message="Java.OutOfMemoryError"
funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
Access to actual or predicted weather done"""
Definition of a source than add dumb data
"""
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
<commit_before>"""
Definition of a source than add dumb data
"""
from src.source import Source
class SourceMock(Source):
"""Add a funny key with a funny value in the given dict"""
def __init__(self, funny_message="Java.OutOfMemoryError"
funny_key="Who's there ?"):
self.funny_message = funny_message
self.funny_key = funny_key
def enrichment(self, data_dict):
data_dict[self.funny_key] = self.funny_message
return data_dict
def keywords(self):
return {self.funny_key}
<commit_msg>Access to actual or predicted weather done<commit_after>"""
Definition of a source than add dumb data
"""
from src.source import Source
from . import weather
class SourceWeaver(Source):
"""
Throught Open Weather Map generates today weather and
expected weather for next days, if possible
"""
def enrichment(self, data_dict):
if default.FIELD_COORDINATES in data_dict:
lat, lon = data_dict[default.FIELD_COORDINATES]
data_dict[default.FIELD_WEATHER] = weather.actual(lat, lon)
if default.FIELD_DATE in data_dict:
date = data_dict[default.FIELD_DATE]
if weather.is_predictable(date):
data_dict[default.FIELD_WEATHER_PREDICTED] = weather.predicted(lat, lon)[str(default.FIELD_DATE)]
return data_dict
def keywords(self):
return {default.FIELD_WEATHER_PREDICTED,
default.FIELD_WEATHER}
|
0d6d28c437b1da4c29b2a9115c4ff8cab038eb53
|
pyvac/task/__init__.py
|
pyvac/task/__init__.py
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=False)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=True)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
Use scoped session for celery tasks
|
Use scoped session for celery tasks
|
Python
|
bsd-3-clause
|
sayoun/pyvac,doyousoft/pyvac,doyousoft/pyvac,sayoun/pyvac,sayoun/pyvac,doyousoft/pyvac
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=False)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
Use scoped session for celery tasks
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=True)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
<commit_before># -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=False)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
<commit_msg>Use scoped session for celery tasks<commit_after>
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=True)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=False)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
Use scoped session for celery tasks# -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=True)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
<commit_before># -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=False)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
<commit_msg>Use scoped session for celery tasks<commit_after># -*- coding: utf-8 -*-
import sys
import yaml
from celery.signals import worker_process_init
from pyvac.helpers.sqla import create_engine
from pyvac.helpers.ldap import LdapCache
from pyvac.helpers.mail import SmtpCache
try:
from yaml import CSafeLoader as YAMLLoader
except ImportError:
from yaml import SafeLoader as YAMLLoader
@worker_process_init.connect
def configure_workers(sender=None, conf=None, **kwargs):
# The Worker (child process of the celeryd) must have
# it's own SQL Connection (A unix forking operation preserve fd)
with open(sys.argv[1]) as fdesc:
conf = yaml.load(fdesc, YAMLLoader)
# XXX Register the database
create_engine('pyvac', conf.get('databases').get('pyvac'),
scoped=True)
LdapCache.configure(conf.get('ldap').get('conf'))
SmtpCache.configure(conf.get('smtp'))
|
d8e876fc60d96f0d635862e845ae565ef3e2afb9
|
openpnm/models/geometry/__init__.py
|
openpnm/models/geometry/__init__.py
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_area
from . import throat_equivalent_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_centroid
from . import throat_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
Update init file in models.geometry
|
Update init file in models.geometry
|
Python
|
mit
|
TomTranter/OpenPNM,PMEAL/OpenPNM
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_area
from . import throat_equivalent_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
Update init file in models.geometry
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_centroid
from . import throat_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
<commit_before>r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_area
from . import throat_equivalent_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
<commit_msg>Update init file in models.geometry<commit_after>
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_centroid
from . import throat_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_area
from . import throat_equivalent_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
Update init file in models.geometryr"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_centroid
from . import throat_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
<commit_before>r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_area
from . import throat_equivalent_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
<commit_msg>Update init file in models.geometry<commit_after>r"""
**openpnm.models.geometry**
----
This submodule contains pore-scale models that calculate geometrical properties
"""
from . import pore_size
from . import pore_seed
from . import pore_volume
from . import pore_surface_area
from . import pore_area
from . import throat_centroid
from . import throat_area
from . import throat_size
from . import throat_length
from . import throat_perimeter
from . import throat_surface_area
from . import throat_volume
from . import throat_shape_factor
|
e1a4839475b87e3ce02a12465b18114c7c85f31b
|
ueberwachungspaket/decorators.py
|
ueberwachungspaket/decorators.py
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url,
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url.encode("idna"),
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
Convert Twilio URL to IDN.
|
Convert Twilio URL to IDN.
|
Python
|
mit
|
AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url,
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
Convert Twilio URL to IDN.
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url.encode("idna"),
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
<commit_before>from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url,
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
<commit_msg>Convert Twilio URL to IDN.<commit_after>
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url.encode("idna"),
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url,
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
Convert Twilio URL to IDN.from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url.encode("idna"),
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
<commit_before>from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url,
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
<commit_msg>Convert Twilio URL to IDN.<commit_after>from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
from config import *
def validate_twilio_request(f):
@wraps(f)
def decorated_function(*args, **kwargs):
validator = RequestValidator(TWILIO_SECRET)
request_valid = validator.validate(
request.url.encode("idna"),
request.form,
request.headers.get("X-TWILIO-SIGNATURE", ""))
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
f7959e3f0727bcab47cdf3b8f1250bbb45788af0
|
skimage/_shared/utils.py
|
skimage/_shared/utils.py
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
msg = 'Call to deprecated function ``%s``.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
|
Fix doc string injection of deprecated wrapper
|
Fix doc string injection of deprecated wrapper
|
Python
|
bsd-3-clause
|
michaelpacer/scikit-image,vighneshbirodkar/scikit-image,chintak/scikit-image,michaelaye/scikit-image,michaelpacer/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,blink1073/scikit-image,ofgulban/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,robintw/scikit-image,paalge/scikit-image,dpshelio/scikit-image,WarrenWeckesser/scikits-image,emon10005/scikit-image,juliusbierk/scikit-image,newville/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,youprofit/scikit-image,ofgulban/scikit-image,rjeli/scikit-image,bsipocz/scikit-image,paalge/scikit-image,chintak/scikit-image,GaZ3ll3/scikit-image,ClinicalGraphics/scikit-image,SamHames/scikit-image,rjeli/scikit-image,almarklein/scikit-image,ajaybhat/scikit-image,newville/scikit-image,emon10005/scikit-image,Hiyorimi/scikit-image,warmspringwinds/scikit-image,michaelaye/scikit-image,robintw/scikit-image,chintak/scikit-image,SamHames/scikit-image,dpshelio/scikit-image,paalge/scikit-image,blink1073/scikit-image,warmspringwinds/scikit-image,jwiggins/scikit-image,keflavich/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,WarrenWeckesser/scikits-image,pratapvardhan/scikit-image,GaZ3ll3/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,SamHames/scikit-image,ofgulban/scikit-image,Midafi/scikit-image,youprofit/scikit-image,ajaybhat/scikit-image,bennlich/scikit-image,bennlich/scikit-image,almarklein/scikit-image,vighneshbirodkar/scikit-image,rjeli/scikit-image,keflavich/scikit-image,Britefury/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,bsipocz/scikit-image,chintak/scikit-image,Midafi/scikit-image
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
Fix doc string injection of deprecated wrapper
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
msg = 'Call to deprecated function ``%s``.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
|
<commit_before>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
<commit_msg>Fix doc string injection of deprecated wrapper<commit_after>
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
msg = 'Call to deprecated function ``%s``.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
|
import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
Fix doc string injection of deprecated wrapperimport warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
msg = 'Call to deprecated function ``%s``.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
|
<commit_before>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use `%s` instead.' % self.alt_func
msg = 'Call to deprecated function `%s`.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n' + wrapped.__doc__
return wrapped
<commit_msg>Fix doc string injection of deprecated wrapper<commit_after>import warnings
import functools
__all__ = ['deprecated']
class deprecated(object):
"""Decorator to mark deprecated functions with warning.
Adapted from <http://wiki.python.org/moin/PythonDecoratorLibrary>.
Parameters
----------
alt_func : str
If given, tell user what function to use instead.
behavior : {'warn', 'raise'}
Behavior during call to deprecated function: 'warn' = warn user that
function is deprecated; 'raise' = raise error.
"""
def __init__(self, alt_func=None, behavior='warn'):
self.alt_func = alt_func
self.behavior = behavior
def __call__(self, func):
alt_msg = ''
if self.alt_func is not None:
alt_msg = ' Use ``%s`` instead.' % self.alt_func
msg = 'Call to deprecated function ``%s``.' % func.__name__
msg += alt_msg
@functools.wraps(func)
def wrapped(*args, **kwargs):
if self.behavior == 'warn':
warnings.warn_explicit(msg,
category=DeprecationWarning,
filename=func.func_code.co_filename,
lineno=func.func_code.co_firstlineno + 1)
elif self.behavior == 'raise':
raise DeprecationWarning(msg)
return func(*args, **kwargs)
# modify doc string to display deprecation warning
doc = '**Deprecated function**.' + alt_msg
if wrapped.__doc__ is None:
wrapped.__doc__ = doc
else:
wrapped.__doc__ = doc + '\n\n ' + wrapped.__doc__
return wrapped
|
41221b36a596b1253445f1e49b10bff1fc44be42
|
tests/test_it.py
|
tests/test_it.py
|
import requests
def test_notifications_admin_index():
response = requests.request("GET", "http://notifications-admin.herokuapp.com/index")
assert response.status_code == 200
# assert response.content == 'Hello from notifications-admin'
|
import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
Fix test for initial registration page flow
|
Fix test for initial registration page flow
|
Python
|
mit
|
alphagov/notifications-functional-tests,alphagov/notifications-functional-tests
|
import requests
def test_notifications_admin_index():
response = requests.request("GET", "http://notifications-admin.herokuapp.com/index")
assert response.status_code == 200
# assert response.content == 'Hello from notifications-admin'
Fix test for initial registration page flow
|
import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
<commit_before>import requests
def test_notifications_admin_index():
response = requests.request("GET", "http://notifications-admin.herokuapp.com/index")
assert response.status_code == 200
# assert response.content == 'Hello from notifications-admin'
<commit_msg>Fix test for initial registration page flow<commit_after>
|
import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
import requests
def test_notifications_admin_index():
response = requests.request("GET", "http://notifications-admin.herokuapp.com/index")
assert response.status_code == 200
# assert response.content == 'Hello from notifications-admin'
Fix test for initial registration page flowimport requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
<commit_before>import requests
def test_notifications_admin_index():
response = requests.request("GET", "http://notifications-admin.herokuapp.com/index")
assert response.status_code == 200
# assert response.content == 'Hello from notifications-admin'
<commit_msg>Fix test for initial registration page flow<commit_after>import requests
def test_notifications_admin_index():
# response = requests.request("GET", "http://localhost:6012")
response = requests.request("GET", "http://notifications-admin.herokuapp.com/")
assert response.status_code == 200
assert 'GOV.UK Notify' in response.content
|
b79c2567bdad69022f00536ebdd66adfcb5e6d48
|
scoreboard/config_defaults.py
|
scoreboard/config_defaults.py
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = True
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
|
Make session cookies have secure and httponly flags by default.
|
Make session cookies have secure and httponly flags by default.
|
Python
|
apache-2.0
|
google/ctfscoreboard,google/ctfscoreboard,google/ctfscoreboard,google/ctfscoreboard
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
Make session cookies have secure and httponly flags by default.
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = True
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
|
<commit_before># Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
<commit_msg>Make session cookies have secure and httponly flags by default.<commit_after>
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = True
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
|
# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
Make session cookies have secure and httponly flags by default.# Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SESSION_COOKIE_HTTPONLY = True
SESSION_COOKIE_SECURE = True
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
|
<commit_before># Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
ATTACHMENT_BACKEND = 'file://attachments'
COUNT_QUERIES = False
CSP_POLICY = None
CWD = os.path.dirname(os.path.realpath(__file__))
DEBUG = False
EXTEND_CSP_POLICY = None
ERROR_404_HELP = False
FIRST_BLOOD = 0
GAME_TIME = (None, None)
LOGIN_METHOD = 'local'
MAIL_FROM = 'ctf@scoreboard'
MAIL_FROM_NAME = None
MAIL_HOST = 'localhost'
NEWS_POLL_INTERVAL = 60000
RULES = '/rules'
SCOREBOARD_ZEROS = True
SCORING = 'plain'
SECRET_KEY = None
SYSTEM_NAME = 'root'
TEAMS = True
TEASE_HIDDEN = True
TITLE = 'Scoreboard'
<commit_msg>Make session cookies have secure and httponly flags by default.<commit_after># Copyright 2016 David Tomaschik <david@systemoverlord.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
class Defaults(object):
    """Default configuration values for the scoreboard application.

    Deployments override these; every value here is only a starting point.
    """
    # Storage URI for challenge attachments; 'file://' = local disk path.
    ATTACHMENT_BACKEND = 'file://attachments'
    # Presumably a debug aid counting DB queries per request — confirm
    # against where COUNT_QUERIES is read.
    COUNT_QUERIES = False
    # Content-Security-Policy header value; None selects the app default.
    CSP_POLICY = None
    # Absolute directory containing this configuration module.
    CWD = os.path.dirname(os.path.realpath(__file__))
    DEBUG = False
    # Extra directives merged into the CSP policy; None = no extensions.
    EXTEND_CSP_POLICY = None
    ERROR_404_HELP = False
    # Presumably bonus points for the first solve of a challenge (0 = off)
    # — verify at the point of use.
    FIRST_BLOOD = 0
    # (start, end) of the game window; None means unbounded on that side.
    GAME_TIME = (None, None)
    LOGIN_METHOD = 'local'
    MAIL_FROM = 'ctf@scoreboard'
    MAIL_FROM_NAME = None
    MAIL_HOST = 'localhost'
    # News refresh interval; the magnitude (60000) suggests milliseconds —
    # confirm against the frontend poller.
    NEWS_POLL_INTERVAL = 60000
    RULES = '/rules'
    SCOREBOARD_ZEROS = True
    SCORING = 'plain'
    # No usable default: must be set per-deployment.
    SECRET_KEY = None
    # Harden session cookies: unreadable from JavaScript and HTTPS-only.
    SESSION_COOKIE_HTTPONLY = True
    SESSION_COOKIE_SECURE = True
    SYSTEM_NAME = 'root'
    TEAMS = True
    TEASE_HIDDEN = True
    TITLE = 'Scoreboard'
|
80e8965c068ab27b18cd1db90ddedbc3dfe3c255
|
templated_email/utils.py
|
templated_email/utils.py
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template.template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
|
Fix 'Template is not iterable' error
|
Fix 'Template is not iterable' error
|
Python
|
mit
|
mypebble/django-templated-email,ScanTrust/django-templated-email,mypebble/django-templated-email,hator/django-templated-email,vintasoftware/django-templated-email,hator/django-templated-email,dpetzold/django-templated-email,vintasoftware/django-templated-email,ScanTrust/django-templated-email,dpetzold/django-templated-email,BradWhittington/django-templated-email,BradWhittington/django-templated-email
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
Fix 'Template is not iterable' error
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template.template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
|
<commit_before>
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
<commit_msg>Fix 'Template is not iterable' error<commit_after>
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
    """Raised when the requested template block cannot be located."""
    pass
def _get_node(template, context=None, name='subject', block_lookups=None):
    """Render the ``{% block %}`` named *name* from *template* and return it.

    Follows ``{% extends %}`` chains recursively: blocks overridden in a
    child template are collected into *block_lookups* and spliced into the
    parent's nodelist before rendering.

    Arguments:
        template: template object whose node tree is exposed as
            ``template.template`` (the Django >= 1.8 backend wrapper).
        context: Context used for rendering; a fresh empty Context when None.
        name: name of the block to extract.
        block_lookups: mapping of block name -> overriding BlockNode.

    Raises:
        BlockNotFound: if no block called *name* exists in the template or
            any of its ancestors.
    """
    # Avoid mutable/stateful default arguments: the old defaults
    # (context=Context(), block_lookups={}) were built once at import time
    # and shared across every call. Build fresh per-call objects instead.
    if context is None:
        context = Context()
    if block_lookups is None:
        block_lookups = {}
    for node in template.template:
        if isinstance(node, BlockNode) and node.name == name:
            # Rudimentary handling of extended templates, for issue #3:
            # splice any overriding child blocks into this block's nodelist.
            for i, subnode in enumerate(node.nodelist):
                if isinstance(subnode, BlockNode) and subnode.name in block_lookups:
                    node.nodelist[i] = block_lookups[subnode.name]
            return node.render(context)
        elif isinstance(node, ExtendsNode):
            lookups = dict((n.name, n) for n in node.nodelist
                           if isinstance(n, BlockNode))
            lookups.update(block_lookups)
            # NOTE(review): the recursion assumes get_parent() returns an
            # object that also exposes ``.template`` — confirm against the
            # Django version in use.
            return _get_node(node.get_parent(context), context, name, lookups)
    raise BlockNotFound("Node '%s' could not be found in template." % name)
|
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
Fix 'Template is not iterable' error
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template.template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
|
<commit_before>
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
<commit_msg>Fix 'Template is not iterable' error<commit_after>
#From http://stackoverflow.com/questions/2687173/django-how-can-i-get-a-block-from-a-template
from django.template import Context
from django.template.loader_tags import BlockNode, ExtendsNode
class BlockNotFound(Exception):
pass
def _get_node(template, context=Context(), name='subject', block_lookups={}):
for node in template.template:
if isinstance(node, BlockNode) and node.name == name:
#Rudimentary handling of extended templates, for issue #3
for i in xrange(len(node.nodelist)):
n = node.nodelist[i]
if isinstance(n, BlockNode) and n.name in block_lookups:
node.nodelist[i] = block_lookups[n.name]
return node.render(context)
elif isinstance(node, ExtendsNode):
lookups = dict([(n.name, n) for n in node.nodelist if isinstance(n, BlockNode)])
lookups.update(block_lookups)
return _get_node(node.get_parent(context), context, name, lookups)
raise BlockNotFound("Node '%s' could not be found in template." % name)
|
15cb312fd7acbb7fae67cb3953537a95274f9d40
|
saleor/search/forms.py
|
saleor/search/forms.py
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import elasticsearch
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
return elasticsearch.search(self.cleaned_data['q'])
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import picker
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
search = picker.pick_backend()
return search(self.cleaned_data['q'])
|
Use backend picker in storefront search form
|
Use backend picker in storefront search form
|
Python
|
bsd-3-clause
|
UITools/saleor,UITools/saleor,maferelo/saleor,maferelo/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,maferelo/saleor
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import elasticsearch
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
return elasticsearch.search(self.cleaned_data['q'])
Use backend picker in storefront search form
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import picker
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
search = picker.pick_backend()
return search(self.cleaned_data['q'])
|
<commit_before>from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import elasticsearch
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
return elasticsearch.search(self.cleaned_data['q'])
<commit_msg>Use backend picker in storefront search form<commit_after>
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import picker
class SearchForm(forms.Form):
    """Storefront search form with a single free-text query field."""

    q = forms.CharField(
        label=pgettext('Search form label', 'Query'), required=True)

    def search(self):
        # Resolve the configured search backend and run the query through it.
        backend = picker.pick_backend()
        query = self.cleaned_data['q']
        return backend(query)
|
from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import elasticsearch
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
return elasticsearch.search(self.cleaned_data['q'])
Use backend picker in storefront search formfrom __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import picker
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
search = picker.pick_backend()
return search(self.cleaned_data['q'])
|
<commit_before>from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import elasticsearch
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
return elasticsearch.search(self.cleaned_data['q'])
<commit_msg>Use backend picker in storefront search form<commit_after>from __future__ import unicode_literals
from django import forms
from django.utils.translation import pgettext
from .backends import picker
class SearchForm(forms.Form):
q = forms.CharField(
label=pgettext('Search form label', 'Query'), required=True)
def search(self):
search = picker.pick_backend()
return search(self.cleaned_data['q'])
|
0034c67af62807221c983c2569dfab71ba302b0e
|
enable/layout/utils.py
|
enable/layout/utils.py
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = left - right
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = right - left
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
|
Fix a typo in the contents constraints creation.
|
Fix a typo in the contents constraints creation.
|
Python
|
bsd-3-clause
|
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = left - right
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
Fix a typo in the contents constraints creation.
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = right - left
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
|
<commit_before>#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = left - right
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
<commit_msg>Fix a typo in the contents constraints creation.<commit_after>
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
    """ Attach derived constraint expressions (right, top, centers) to a
    namespace, built from its basic bottom/left/size constraints.
    """
    y0 = namespace.bottom
    x0 = namespace.left
    w = namespace.layout_width
    h = namespace.layout_height
    # Opposite edges are offset by the full extent; centers by half of it.
    namespace.right = x0 + w
    namespace.top = y0 + h
    namespace.h_center = x0 + w / 2.0
    namespace.v_center = y0 + h / 2.0
def add_symbolic_contents_constraints(namespace):
    """ Attach derived contents-box expressions (width, height, centers) to
    a namespace, built from its basic contents edge constraints.
    """
    l_edge = namespace.contents_left
    r_edge = namespace.contents_right
    t_edge = namespace.contents_top
    b_edge = namespace.contents_bottom
    # Extents span between opposite edges; centers sit half-way along them.
    width = r_edge - l_edge
    height = t_edge - b_edge
    namespace.contents_width = width
    namespace.contents_height = height
    namespace.contents_v_center = b_edge + height / 2.0
    namespace.contents_h_center = l_edge + width / 2.0
|
#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = left - right
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
Fix a typo in the contents constraints creation.#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = right - left
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
|
<commit_before>#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = left - right
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
<commit_msg>Fix a typo in the contents constraints creation.<commit_after>#------------------------------------------------------------------------------
# Copyright (c) 2013, Enthought, Inc.
# All rights reserved.
#------------------------------------------------------------------------------
STRENGTHS = set(['required', 'strong', 'medium', 'weak'])
def add_symbolic_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
bottom = namespace.bottom
left = namespace.left
width = namespace.layout_width
height = namespace.layout_height
namespace.right = left + width
namespace.top = bottom + height
namespace.h_center = left + width / 2.0
namespace.v_center = bottom + height / 2.0
def add_symbolic_contents_constraints(namespace):
""" Add constraints to a namespace that are LinearExpressions of basic
constraints.
"""
left = namespace.contents_left
right = namespace.contents_right
top = namespace.contents_top
bottom = namespace.contents_bottom
namespace.contents_width = right - left
namespace.contents_height = top - bottom
namespace.contents_v_center = bottom + namespace.contents_height / 2.0
namespace.contents_h_center = left + namespace.contents_width / 2.0
|
422a75e4b85345bd517c73760430ae773d49dc00
|
var/spack/packages/arpack/package.py
|
var/spack/packages/arpack/package.py
|
from spack import *
class Arpack(Package):
"""FIXME: put a proper description of your package here."""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def install(self, spec, prefix):
move('./ARMAKES/ARmake.CRAY', './ARmake.inc')
filter_file('PLAT = CRAY', 'PLAT = ', './ARmake.inc', string=True)
filter_file('home = $(HOME)/ARPACK', 'home = %s' % pwd(), './ARmake.inc', string=True)
filter_file('BLASdir = $(home)/BLAS', 'BLASdir = %s' % spec['blas'].prefix, './ARmake.inc', string=True)
filter_file('LAPACKdir = $(home)/LAPACK', 'LAPACKdir = %s' % spec['lapack'].prefix, './ARmake.inc', string=True)
filter_file('ARPACKLIB = $(home)/libarpack_$(PLAT).a', 'ARPACKLIB = %s/lib/libarpack.a' % prefix, './ARmake.inc', string=True)
cd('./SRC')
make('all')
|
from spack import *
class Arpack(Package):
    """A collection of Fortran77 subroutines designed to solve large scale
    eigenvalue problems.
    """
    homepage = "http://www.caam.rice.edu/software/ARPACK/"
    url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
    version('96', 'fffaa970198b285676f4156cebc8626e')
    depends_on('blas')
    depends_on('lapack')
    def patch(self):
        """Rewrite the bundled Cray makefile into one usable by Spack."""
        move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
        makefile = FileFilter('ARmake.inc')
        # Applied in order: use the Spack f77 wrapper, clear the platform
        # suffix, point the source roots at our dependency prefixes, and
        # emit the library inside this package's own prefix.
        substitutions = (
            ('^FC.*', 'FC = f77'),
            ('^PLAT.*', 'PLAT = '),
            ('^home =.*', 'home = %s' % pwd()),
            ('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix),
            ('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix),
            ('^ARPACKLIB.*', 'ARPACKLIB = %s/lib/libarpack.a' % self.prefix),
        )
        for pattern, replacement in substitutions:
            makefile.filter(pattern, replacement)
    def install(self, spec, prefix):
        """Build everything from the SRC subdirectory."""
        with working_dir('SRC'):
            make('all')
|
Clean up arpack build, use the Spack f77 compiler.
|
Clean up arpack build, use the Spack f77 compiler.
|
Python
|
lgpl-2.1
|
TheTimmy/spack,tmerrick1/spack,mfherbst/spack,lgarren/spack,iulian787/spack,lgarren/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,mfherbst/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,mfherbst/spack,skosukhin/spack,matthiasdiener/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,EmreAtes/spack,lgarren/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,EmreAtes/spack,krafczyk/spack,TheTimmy/spack,TheTimmy/spack,skosukhin/spack,tmerrick1/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,LLNL/spack,skosukhin/spack,lgarren/spack,TheTimmy/spack,krafczyk/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,krafczyk/spack,iulian787/spack
|
from spack import *
class Arpack(Package):
"""FIXME: put a proper description of your package here."""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def install(self, spec, prefix):
move('./ARMAKES/ARmake.CRAY', './ARmake.inc')
filter_file('PLAT = CRAY', 'PLAT = ', './ARmake.inc', string=True)
filter_file('home = $(HOME)/ARPACK', 'home = %s' % pwd(), './ARmake.inc', string=True)
filter_file('BLASdir = $(home)/BLAS', 'BLASdir = %s' % spec['blas'].prefix, './ARmake.inc', string=True)
filter_file('LAPACKdir = $(home)/LAPACK', 'LAPACKdir = %s' % spec['lapack'].prefix, './ARmake.inc', string=True)
filter_file('ARPACKLIB = $(home)/libarpack_$(PLAT).a', 'ARPACKLIB = %s/lib/libarpack.a' % prefix, './ARmake.inc', string=True)
cd('./SRC')
make('all')
Clean up arpack build, use the Spack f77 compiler.
|
from spack import *
class Arpack(Package):
"""A collection of Fortran77 subroutines designed to solve large scale
eigenvalue problems.
"""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def patch(self):
# Filter the cray makefile to make a spack one.
move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
makefile = FileFilter('ARmake.inc')
# Be sure to use Spack F77 wrapper
makefile.filter('^FC.*', 'FC = f77')
# Set up some variables.
makefile.filter('^PLAT.*', 'PLAT = ')
makefile.filter('^home =.*', 'home = %s' % pwd())
makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
# build the library in our own prefix.
makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/lib/libarpack.a' % self.prefix)
def install(self, spec, prefix):
with working_dir('SRC'):
make('all')
|
<commit_before>from spack import *
class Arpack(Package):
"""FIXME: put a proper description of your package here."""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def install(self, spec, prefix):
move('./ARMAKES/ARmake.CRAY', './ARmake.inc')
filter_file('PLAT = CRAY', 'PLAT = ', './ARmake.inc', string=True)
filter_file('home = $(HOME)/ARPACK', 'home = %s' % pwd(), './ARmake.inc', string=True)
filter_file('BLASdir = $(home)/BLAS', 'BLASdir = %s' % spec['blas'].prefix, './ARmake.inc', string=True)
filter_file('LAPACKdir = $(home)/LAPACK', 'LAPACKdir = %s' % spec['lapack'].prefix, './ARmake.inc', string=True)
filter_file('ARPACKLIB = $(home)/libarpack_$(PLAT).a', 'ARPACKLIB = %s/lib/libarpack.a' % prefix, './ARmake.inc', string=True)
cd('./SRC')
make('all')
<commit_msg>Clean up arpack build, use the Spack f77 compiler.<commit_after>
|
from spack import *
class Arpack(Package):
"""A collection of Fortran77 subroutines designed to solve large scale
eigenvalue problems.
"""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def patch(self):
# Filter the cray makefile to make a spack one.
move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
makefile = FileFilter('ARmake.inc')
# Be sure to use Spack F77 wrapper
makefile.filter('^FC.*', 'FC = f77')
# Set up some variables.
makefile.filter('^PLAT.*', 'PLAT = ')
makefile.filter('^home =.*', 'home = %s' % pwd())
makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
# build the library in our own prefix.
makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/lib/libarpack.a' % self.prefix)
def install(self, spec, prefix):
with working_dir('SRC'):
make('all')
|
from spack import *
class Arpack(Package):
"""FIXME: put a proper description of your package here."""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def install(self, spec, prefix):
move('./ARMAKES/ARmake.CRAY', './ARmake.inc')
filter_file('PLAT = CRAY', 'PLAT = ', './ARmake.inc', string=True)
filter_file('home = $(HOME)/ARPACK', 'home = %s' % pwd(), './ARmake.inc', string=True)
filter_file('BLASdir = $(home)/BLAS', 'BLASdir = %s' % spec['blas'].prefix, './ARmake.inc', string=True)
filter_file('LAPACKdir = $(home)/LAPACK', 'LAPACKdir = %s' % spec['lapack'].prefix, './ARmake.inc', string=True)
filter_file('ARPACKLIB = $(home)/libarpack_$(PLAT).a', 'ARPACKLIB = %s/lib/libarpack.a' % prefix, './ARmake.inc', string=True)
cd('./SRC')
make('all')
Clean up arpack build, use the Spack f77 compiler.from spack import *
class Arpack(Package):
"""A collection of Fortran77 subroutines designed to solve large scale
eigenvalue problems.
"""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def patch(self):
# Filter the cray makefile to make a spack one.
move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
makefile = FileFilter('ARmake.inc')
# Be sure to use Spack F77 wrapper
makefile.filter('^FC.*', 'FC = f77')
# Set up some variables.
makefile.filter('^PLAT.*', 'PLAT = ')
makefile.filter('^home =.*', 'home = %s' % pwd())
makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
# build the library in our own prefix.
makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/lib/libarpack.a' % self.prefix)
def install(self, spec, prefix):
with working_dir('SRC'):
make('all')
|
<commit_before>from spack import *
class Arpack(Package):
"""FIXME: put a proper description of your package here."""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def install(self, spec, prefix):
move('./ARMAKES/ARmake.CRAY', './ARmake.inc')
filter_file('PLAT = CRAY', 'PLAT = ', './ARmake.inc', string=True)
filter_file('home = $(HOME)/ARPACK', 'home = %s' % pwd(), './ARmake.inc', string=True)
filter_file('BLASdir = $(home)/BLAS', 'BLASdir = %s' % spec['blas'].prefix, './ARmake.inc', string=True)
filter_file('LAPACKdir = $(home)/LAPACK', 'LAPACKdir = %s' % spec['lapack'].prefix, './ARmake.inc', string=True)
filter_file('ARPACKLIB = $(home)/libarpack_$(PLAT).a', 'ARPACKLIB = %s/lib/libarpack.a' % prefix, './ARmake.inc', string=True)
cd('./SRC')
make('all')
<commit_msg>Clean up arpack build, use the Spack f77 compiler.<commit_after>from spack import *
class Arpack(Package):
"""A collection of Fortran77 subroutines designed to solve large scale
eigenvalue problems.
"""
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
version('96', 'fffaa970198b285676f4156cebc8626e')
depends_on('blas')
depends_on('lapack')
def patch(self):
# Filter the cray makefile to make a spack one.
move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
makefile = FileFilter('ARmake.inc')
# Be sure to use Spack F77 wrapper
makefile.filter('^FC.*', 'FC = f77')
# Set up some variables.
makefile.filter('^PLAT.*', 'PLAT = ')
makefile.filter('^home =.*', 'home = %s' % pwd())
makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
# build the library in our own prefix.
makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/lib/libarpack.a' % self.prefix)
def install(self, spec, prefix):
with working_dir('SRC'):
make('all')
|
e8ca2582404d44a6bc97f455187523713c49d90f
|
test/test_random_seed.py
|
test/test_random_seed.py
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_random_seed']
assert_equal(val, 0.4967141530112327)
def test_gaussian_uniform_seed():
gson = \
"""
{
"gaussian_uniform_seed" : uniform(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_uniform_seed']
assert_equal(val, 0.3745401188473625)
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['gaussian_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.4967141530112327)
assert_equal(vals[1], -0.13826430117118466)
def test_gaussian_uniform_seed():
gson = \
"""
{
"uniform_random_seed" : uniform(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['uniform_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.3745401188473625)
assert_equal(vals[1], 0.9507143064099162)
|
Add RandomState to help generate different values, with test
|
Add RandomState to help generate different values, with test
|
Python
|
mit
|
davidcox/genson
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_random_seed']
assert_equal(val, 0.4967141530112327)
def test_gaussian_uniform_seed():
gson = \
"""
{
"gaussian_uniform_seed" : uniform(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_uniform_seed']
assert_equal(val, 0.3745401188473625)
Add RandomState to help generate different values, with test
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['gaussian_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.4967141530112327)
assert_equal(vals[1], -0.13826430117118466)
def test_gaussian_uniform_seed():
gson = \
"""
{
"uniform_random_seed" : uniform(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['uniform_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.3745401188473625)
assert_equal(vals[1], 0.9507143064099162)
|
<commit_before>from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_random_seed']
assert_equal(val, 0.4967141530112327)
def test_gaussian_uniform_seed():
gson = \
"""
{
"gaussian_uniform_seed" : uniform(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_uniform_seed']
assert_equal(val, 0.3745401188473625)
<commit_msg>Add RandomState to help generate different values, with test<commit_after>
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['gaussian_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.4967141530112327)
assert_equal(vals[1], -0.13826430117118466)
def test_gaussian_uniform_seed():
gson = \
"""
{
"uniform_random_seed" : uniform(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['uniform_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.3745401188473625)
assert_equal(vals[1], 0.9507143064099162)
|
from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_random_seed']
assert_equal(val, 0.4967141530112327)
def test_gaussian_uniform_seed():
gson = \
"""
{
"gaussian_uniform_seed" : uniform(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_uniform_seed']
assert_equal(val, 0.3745401188473625)
Add RandomState to help generate different values, with testfrom nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['gaussian_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.4967141530112327)
assert_equal(vals[1], -0.13826430117118466)
def test_gaussian_uniform_seed():
gson = \
"""
{
"uniform_random_seed" : uniform(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['uniform_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.3745401188473625)
assert_equal(vals[1], 0.9507143064099162)
|
<commit_before>from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_random_seed']
assert_equal(val, 0.4967141530112327)
def test_gaussian_uniform_seed():
gson = \
"""
{
"gaussian_uniform_seed" : uniform(0, 1, draws=1, random_seed=42)
}
"""
val = genson.loads(gson).next()['gaussian_uniform_seed']
assert_equal(val, 0.3745401188473625)
<commit_msg>Add RandomState to help generate different values, with test<commit_after>from nose.tools import assert_equal
import genson
def test_gaussian_random_seed():
gson = \
"""
{
"gaussian_random_seed" : gaussian(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['gaussian_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.4967141530112327)
assert_equal(vals[1], -0.13826430117118466)
def test_gaussian_uniform_seed():
gson = \
"""
{
"uniform_random_seed" : uniform(0, 1, draws=2, random_seed=42)
}
"""
vals = [val['uniform_random_seed'] for val in genson.loads(gson)]
assert_equal(vals[0], 0.3745401188473625)
assert_equal(vals[1], 0.9507143064099162)
|
1bb86e33c8862b5423d292ccc1bd74c560af2e44
|
vinotes/apps/api/models.py
|
vinotes/apps/api/models.py
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine}'.format(winery=self.winery, wine=self.title)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine} {vintage}'.format(
winery=self.winery, wine=self.title, vintage=self.vintage)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
Update to include vintage in string representation for Wine.
|
Update to include vintage in string representation for Wine.
|
Python
|
unlicense
|
rcutmore/vinotes-api,rcutmore/vinotes-api
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine}'.format(winery=self.winery, wine=self.title)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)Update to include vintage in string representation for Wine.
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine} {vintage}'.format(
winery=self.winery, wine=self.title, vintage=self.vintage)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
<commit_before>from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine}'.format(winery=self.winery, wine=self.title)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)<commit_msg>Update to include vintage in string representation for Wine.<commit_after>
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine} {vintage}'.format(
winery=self.winery, wine=self.title, vintage=self.vintage)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine}'.format(winery=self.winery, wine=self.title)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)Update to include vintage in string representation for Wine.from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine} {vintage}'.format(
winery=self.winery, wine=self.title, vintage=self.vintage)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
<commit_before>from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine}'.format(winery=self.winery, wine=self.title)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)<commit_msg>Update to include vintage in string representation for Wine.<commit_after>from django.db import models
class Winery(models.Model):
title = models.CharField(max_length=150)
def __str__(self):
return self.title
class Wine(models.Model):
title = models.CharField(max_length=150)
vintage = models.IntegerField()
winery = models.ForeignKey(Winery)
def __str__(self):
return '{winery} {wine} {vintage}'.format(
winery=self.winery, wine=self.title, vintage=self.vintage)
class Note(models.Model):
wine = models.ForeignKey(Wine)
def __str__(self):
return 'Tasting note for: {wine}'.format(wine=self.wine)
|
10a0d12f39760d2c2d57f66bc445f0cb87cde69f
|
django_website/aggregator/management/commands/mark_defunct_feeds.py
|
django_website/aggregator/management/commands/mark_defunct_feeds.py
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
socket.setdefaulttimeout(15)
r = urllib2.urlopen(f.feed_url)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
r = urllib2.urlopen(f.feed_url, timeout=15)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
Set feed update timeouts in a more modern way.
|
Set feed update timeouts in a more modern way.
|
Python
|
bsd-3-clause
|
vxvinh1511/djangoproject.com,gnarf/djangoproject.com,hassanabidpk/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,nanuxbe/django,django/djangoproject.com,hassanabidpk/djangoproject.com,django/djangoproject.com,xavierdutreilh/djangoproject.com,gnarf/djangoproject.com,gnarf/djangoproject.com,rmoorman/djangoproject.com,gnarf/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,vxvinh1511/djangoproject.com,xavierdutreilh/djangoproject.com,relekang/djangoproject.com,relekang/djangoproject.com,khkaminska/djangoproject.com,alawnchen/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,rmoorman/djangoproject.com,alawnchen/djangoproject.com,nanuxbe/django,khkaminska/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,xavierdutreilh/djangoproject.com,rmoorman/djangoproject.com,khkaminska/djangoproject.com,django/djangoproject.com,alawnchen/djangoproject.com,relekang/djangoproject.com,nanuxbe/django,vxvinh1511/djangoproject.com,hassanabidpk/djangoproject.com
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
socket.setdefaulttimeout(15)
r = urllib2.urlopen(f.feed_url)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
Set feed update timeouts in a more modern way.
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
r = urllib2.urlopen(f.feed_url, timeout=15)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
<commit_before>import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
socket.setdefaulttimeout(15)
r = urllib2.urlopen(f.feed_url)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
<commit_msg>Set feed update timeouts in a more modern way.<commit_after>
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
r = urllib2.urlopen(f.feed_url, timeout=15)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
socket.setdefaulttimeout(15)
r = urllib2.urlopen(f.feed_url)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
Set feed update timeouts in a more modern way.import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
r = urllib2.urlopen(f.feed_url, timeout=15)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
<commit_before>import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
socket.setdefaulttimeout(15)
r = urllib2.urlopen(f.feed_url)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
<commit_msg>Set feed update timeouts in a more modern way.<commit_after>import urllib2
from django.core.management.base import BaseCommand
from django_website.apps.aggregator.models import Feed
class Command(BaseCommand):
"""
Mark people with 404'ing feeds as defunct.
"""
def handle(self, *args, **kwargs):
verbose = kwargs.get('verbosity')
for f in Feed.objects.all():
try:
r = urllib2.urlopen(f.feed_url, timeout=15)
except urllib2.HTTPError, e:
if e.code == 404 or e.code == 500:
if verbose:
print "%s on %s; marking defunct" % (e.code, f)
f.is_defunct = True
f.save()
else:
raise
|
450cb155d87b49a718e465d582bd2ccafb3244dd
|
tests/test_calculator.py
|
tests/test_calculator.py
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
def test_calculator_division_method_returns_correct_result(self):
calc = Calculator()
result = calc.division(5,2)
self.assertEqual(2.5, result)
|
Add new test for division
|
Add new test for division
|
Python
|
apache-2.0
|
kamaxeon/fap
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
Add new test for division
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
def test_calculator_division_method_returns_correct_result(self):
calc = Calculator()
result = calc.division(5,2)
self.assertEqual(2.5, result)
|
<commit_before>import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
<commit_msg>Add new test for division<commit_after>
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
def test_calculator_division_method_returns_correct_result(self):
calc = Calculator()
result = calc.division(5,2)
self.assertEqual(2.5, result)
|
import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
Add new test for divisionimport unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
def test_calculator_division_method_returns_correct_result(self):
calc = Calculator()
result = calc.division(5,2)
self.assertEqual(2.5, result)
|
<commit_before>import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
<commit_msg>Add new test for division<commit_after>import unittest
from app.calculator import Calculator
class TestCalculator(unittest.TestCase):
def setUp(self):
self.calc = Calculator()
def test_calculator_addition_method_returns_correct_result(self):
calc = Calculator()
result = calc.addition(2,2)
self.assertEqual(4, result)
def test_calculator_subtraction_method_returns_correct_result(self):
calc = Calculator()
result = calc.substraction(4,2)
self.assertEqual(2, result)
def test_calculator_division_method_returns_correct_result(self):
calc = Calculator()
result = calc.division(5,2)
self.assertEqual(2.5, result)
|
472110a92d15358aee6aeb6dd007f4d237a6fad6
|
compile/06-switch.dg.py
|
compile/06-switch.dg.py
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
Swap opcodes for if-else and unless-else.
|
Swap opcodes for if-else and unless-else.
Oops.
|
Python
|
mit
|
pyos/dg
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
Swap opcodes for if-else and unless-else.
Oops.
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
<commit_before>..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
<commit_msg>Swap opcodes for if-else and unless-else.
Oops.<commit_after>
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
Swap opcodes for if-else and unless-else.
Oops...compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
<commit_before>..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
<commit_msg>Swap opcodes for if-else and unless-else.
Oops.<commit_after>..compile = import
..parse.syntax = import
compile.r.builtins !! 'else' = (self, cond, otherwise) ->
'''
a if b else c
a unless b else c
Ternary conditional.
'''
is_if, (then, cond) = parse.syntax.else_: cond
ptr = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0 if is_if
ptr = self.opcode: 'POP_JUMP_IF_TRUE' cond delta: 0 unless is_if
jmp = self.opcode: 'JUMP_FORWARD' then delta: 0
ptr:
self.load: otherwise
jmp:
compile.r.builtins !! 'switch' = (self, cases) ->
'''
switch $
condition1 = when_condition1_is_true
...
conditionN = when_conditionN_is_true
Evaluate the first action assigned to a true condition.
`if-elif` is probably a better equivalent than `switch-case`.
'''
jumps = list $ map: c -> (
cond, action = c
next = self.opcode: 'POP_JUMP_IF_FALSE' cond delta: 0
end = self.opcode: 'JUMP_FORWARD' action delta: 0
next:
end
) $ parse.syntax.switch: cases
self.load: None # in case nothing matched
list $ map: x -> (x:) jumps
|
0c3bbe815275b06729cdb52668b38f3a83e7fbac
|
datacommons/crawlers/zz/icij_dump.py
|
datacommons/crawlers/zz/icij_dump.py
|
from normality import slugify, stringify
from csv import DictReader
from zipfile import ZipFile
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = []
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.append(row)
if len(chunk) >= 20000:
context.log.info("Loaded [%s]: %s rows...", table_name, i)
table.insert_many(chunk)
chunk = []
if len(chunk):
table.insert_many(chunk)
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
import io
from csv import DictReader
from zipfile import ZipFile
from normality import slugify, stringify
from dataset.chunked import ChunkedInsert
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
fh = io.TextIOWrapper(fh)
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = ChunkedInsert(table)
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.insert(row)
chunk.flush()
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
Fix up ICIJ loader as-is
|
Fix up ICIJ loader as-is
|
Python
|
mit
|
pudo/flexicadastre
|
from normality import slugify, stringify
from csv import DictReader
from zipfile import ZipFile
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = []
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.append(row)
if len(chunk) >= 20000:
context.log.info("Loaded [%s]: %s rows...", table_name, i)
table.insert_many(chunk)
chunk = []
if len(chunk):
table.insert_many(chunk)
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
Fix up ICIJ loader as-is
|
import io
from csv import DictReader
from zipfile import ZipFile
from normality import slugify, stringify
from dataset.chunked import ChunkedInsert
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
fh = io.TextIOWrapper(fh)
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = ChunkedInsert(table)
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.insert(row)
chunk.flush()
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
<commit_before>from normality import slugify, stringify
from csv import DictReader
from zipfile import ZipFile
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = []
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.append(row)
if len(chunk) >= 20000:
context.log.info("Loaded [%s]: %s rows...", table_name, i)
table.insert_many(chunk)
chunk = []
if len(chunk):
table.insert_many(chunk)
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
<commit_msg>Fix up ICIJ loader as-is<commit_after>
|
import io
from csv import DictReader
from zipfile import ZipFile
from normality import slugify, stringify
from dataset.chunked import ChunkedInsert
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
fh = io.TextIOWrapper(fh)
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = ChunkedInsert(table)
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.insert(row)
chunk.flush()
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
from normality import slugify, stringify
from csv import DictReader
from zipfile import ZipFile
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = []
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.append(row)
if len(chunk) >= 20000:
context.log.info("Loaded [%s]: %s rows...", table_name, i)
table.insert_many(chunk)
chunk = []
if len(chunk):
table.insert_many(chunk)
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
Fix up ICIJ loader as-isimport io
from csv import DictReader
from zipfile import ZipFile
from normality import slugify, stringify
from dataset.chunked import ChunkedInsert
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
fh = io.TextIOWrapper(fh)
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = ChunkedInsert(table)
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.insert(row)
chunk.flush()
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
<commit_before>from normality import slugify, stringify
from csv import DictReader
from zipfile import ZipFile
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = []
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.append(row)
if len(chunk) >= 20000:
context.log.info("Loaded [%s]: %s rows...", table_name, i)
table.insert_many(chunk)
chunk = []
if len(chunk):
table.insert_many(chunk)
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
<commit_msg>Fix up ICIJ loader as-is<commit_after>import io
from csv import DictReader
from zipfile import ZipFile
from normality import slugify, stringify
from dataset.chunked import ChunkedInsert
def load_file(context, zip, name):
fh = zip.open(name)
_, section, _ = name.rsplit(".", 2)
table_name = "%s_%s" % (context.crawler.name, section)
table = context.datastore[table_name]
table.drop()
fh = io.TextIOWrapper(fh)
reader = DictReader(fh, delimiter=",", quotechar='"')
chunk = ChunkedInsert(table)
for i, row in enumerate(reader, 1):
row = {slugify(k, sep="_"): stringify(v) for (k, v) in row.items()}
chunk.insert(row)
chunk.flush()
context.log.info("Done [%s]: %s rows...", table_name, i)
def load(context, data):
with context.http.rehash(data) as result:
with ZipFile(result.file_path, "r") as zip:
for name in zip.namelist():
load_file(context, zip, name)
|
8f4c376a57c68636188880cd92c64b4640b1c8cc
|
sheared/web/entwine.py
|
sheared/web/entwine.py
|
import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
Remove the builtins arguments to the {tal,metal}.execute calls.
|
Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7
|
Python
|
mit
|
kirkeby/sheared
|
import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7
|
import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
<commit_before>import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
<commit_msg>Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7<commit_after>
|
import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
<commit_before>import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
<commit_msg>Remove the builtins arguments to the {tal,metal}.execute calls.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@107 5646265b-94b7-0310-9681-9501d24b2df7<commit_after>import warnings
from dtml import tal, metal, tales
from sheared.python import io
class Entwiner:
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
|
a463fe25f21194744e9979840b7535f6cd765e36
|
core/plugins/command.py
|
core/plugins/command.py
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.append(params)
self.process = self.open(cmd, environ)
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shlex
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.extend(shlex.split(params))
self.process = self.open(cmd, environ)
|
Use shlex to split parameters
|
Use shlex to split parameters
|
Python
|
mpl-2.0
|
MozillaSecurity/quokka,drptbl/quokka
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.append(params)
self.process = self.open(cmd, environ)
Use shlex to split parameters
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shlex
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.extend(shlex.split(params))
self.process = self.open(cmd, environ)
|
<commit_before># coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.append(params)
self.process = self.open(cmd, environ)
<commit_msg>Use shlex to split parameters<commit_after>
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shlex
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.extend(shlex.split(params))
self.process = self.open(cmd, environ)
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.append(params)
self.process = self.open(cmd, environ)
Use shlex to split parameters# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shlex
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.extend(shlex.split(params))
self.process = self.open(cmd, environ)
|
<commit_before># coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.append(params)
self.process = self.open(cmd, environ)
<commit_msg>Use shlex to split parameters<commit_after># coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import shlex
from ..quokka import ExternalProcess, PluginException
class ConsoleApplication(ExternalProcess):
def __init__(self, conf):
super(ConsoleApplication, self).__init__()
self.quokka = conf.quokka
self.plugin = conf.plugin_kargs
def start(self):
binary = self.plugin['binary']
if not binary or not os.path.exists(binary):
raise PluginException('%s not found.' % binary)
params = self.plugin['params']
environ = self.set_environ(self.quokka['environ'])
cmd = [binary]
if params:
cmd.extend(shlex.split(params))
self.process = self.open(cmd, environ)
|
a1db577312a31f73a0f1c9f04cc65871f2ef1c95
|
dbaas/maintenance/admin/maintenance.py
|
dbaas/maintenance/admin/maintenance.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for","maximum_workers", 'status',
"main_script", "rollback_script", "host_query","celery_task_id")
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
Change field order and customize Maintenance add_view
|
Change field order and customize Maintenance add_view
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for","maximum_workers", 'status',
"main_script", "rollback_script", "host_query","celery_task_id")
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
Change field order and customize Maintenance add_view
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for","maximum_workers", 'status',
"main_script", "rollback_script", "host_query","celery_task_id")
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
<commit_msg>Change field order and customize Maintenance add_view<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for","maximum_workers", 'status',
"main_script", "rollback_script", "host_query","celery_task_id")
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
Change field order and customize Maintenance add_view# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for","maximum_workers", 'status',
"main_script", "rollback_script", "host_query","celery_task_id")
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
<commit_msg>Change field order and customize Maintenance add_view<commit_after># -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.utils.translation import ugettext_lazy as _
from django_services import admin
from ..models import Maintenance
from ..service.maintenance import MaintenanceService
class MaintenanceAdmin(admin.DjangoServicesAdmin):
service_class = MaintenanceService
search_fields = ("scheduled_for", "description", "maximum_workers", 'status')
list_display = ("scheduled_for", "description", "maximum_workers", 'status')
fields = ( "description", "scheduled_for", "main_script", "rollback_script",
"host_query","maximum_workers", "status", "celery_task_id",)
save_on_top = True
readonly_fields = ('status', 'celery_task_id')
def change_view(self, request, object_id, form_url='', extra_context=None):
maintenance = Maintenance.objects.get(id=object_id)
if maintenance.celery_task_id:
self.readonly_fields = self.fields
return super(MaintenanceAdmin, self).change_view(request,
object_id, form_url, extra_context=extra_context)
def add_view(self, request, form_url='', extra_context=None):
return super(MaintenanceAdmin, self).add_view(request, form_url,
extra_context)
|
2d0a96403d58c4a939b17e67f8f93190839ff340
|
txircd/modules/cmd_ping.py
|
txircd/modules/cmd_ping.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"], prefix=None)
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"])
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
Send the server prefix on the line when the client sends PING
|
Send the server prefix on the line when the client sends PING
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"], prefix=None)
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]Send the server prefix on the line when the client sends PING
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"])
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"], prefix=None)
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]<commit_msg>Send the server prefix on the line when the client sends PING<commit_after>
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"])
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"], prefix=None)
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]Send the server prefix on the line when the client sends PINGfrom twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"])
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
<commit_before>from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"], prefix=None)
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]<commit_msg>Send the server prefix on the line when the client sends PING<commit_after>from twisted.words.protocols import irc
from txircd.modbase import Command
class PingCommand(Command):
def onUse(self, user, data):
if data["params"]:
user.sendMessage("PONG", ":{}".format(data["params"][0]), to=self.ircd.servconfig["server_name"])
else:
user.sendMessage(irc.ERR_NOORIGIN, ":No origin specified")
def updateActivity(self, user):
pass
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"PING": PingCommand()
}
}
def cleanup(self):
del self.ircd.commands["PING"]
|
8fd62b820cb03b1dcfc3945f612ca43f916b86a2
|
prettyplotlib/_eventplot.py
|
prettyplotlib/_eventplot.py
|
__author__ = 'jgosmann'
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
# FIXME 1d positions
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:len(positions)]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
__author__ = 'jgosmann'
from matplotlib.cbook import iterable
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
if any(iterable(p) for p in positions):
size = len(positions)
else:
size = 1
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
Fix eventplot for 1d arguments.
|
Fix eventplot for 1d arguments.
|
Python
|
mit
|
olgabot/prettyplotlib,olgabot/prettyplotlib
|
__author__ = 'jgosmann'
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
# FIXME 1d positions
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:len(positions)]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
Fix eventplot for 1d arguments.
|
__author__ = 'jgosmann'
from matplotlib.cbook import iterable
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
if any(iterable(p) for p in positions):
size = len(positions)
else:
size = 1
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
<commit_before>__author__ = 'jgosmann'
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
# FIXME 1d positions
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:len(positions)]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
<commit_msg>Fix eventplot for 1d arguments.<commit_after>
|
__author__ = 'jgosmann'
from matplotlib.cbook import iterable
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
if any(iterable(p) for p in positions):
size = len(positions)
else:
size = 1
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
__author__ = 'jgosmann'
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
# FIXME 1d positions
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:len(positions)]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
Fix eventplot for 1d arguments.__author__ = 'jgosmann'
from matplotlib.cbook import iterable
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
if any(iterable(p) for p in positions):
size = len(positions)
else:
size = 1
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
<commit_before>__author__ = 'jgosmann'
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
# FIXME 1d positions
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:len(positions)]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
<commit_msg>Fix eventplot for 1d arguments.<commit_after>__author__ = 'jgosmann'
from matplotlib.cbook import iterable
from prettyplotlib.utils import remove_chartjunk, maybe_get_ax
from prettyplotlib.colors import set2
def eventplot(*args, **kwargs):
ax, args, kwargs = maybe_get_ax(*args, **kwargs)
show_ticks = kwargs.pop('show_ticks', False)
if len(args) > 0:
positions = args[0]
else:
positions = kwargs['positions']
if any(iterable(p) for p in positions):
size = len(positions)
else:
size = 1
kwargs.setdefault('colors', [c + (1.0,) for c in set2[:size]])
event_collections = ax.eventplot(*args, **kwargs)
remove_chartjunk(ax, ['top', 'right'], show_ticks=show_ticks)
return event_collections
|
21cf7d8dddad631975760ea71ef4f530acecf393
|
hello.py
|
hello.py
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
if __name__ == '__main__':
app.run()
|
Allow running the app from cli
|
Allow running the app from cli
|
Python
|
mit
|
siavashg/tictail-heroku-flask-app,siavashg/tictail-heroku-flask-app
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
Allow running the app from cli
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
if __name__ == '__main__':
app.run()
|
<commit_before>from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
<commit_msg>Allow running the app from cli<commit_after>
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
if __name__ == '__main__':
app.run()
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
Allow running the app from clifrom flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
if __name__ == '__main__':
app.run()
|
<commit_before>from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
<commit_msg>Allow running the app from cli<commit_after>from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello():
return render_template('hello.html')
if __name__ == '__main__':
app.run()
|
f64f0b42f2d1163b2d85194e0979def539f5dca3
|
Lib/fontTools/misc/intTools.py
|
Lib/fontTools/misc/intTools.py
|
__all__ = ['popCount']
def popCount(v):
"""Return number of 1 bits (population count) of an integer.
If the integer is negative, the number of 1 bits in the
twos-complement representation of the integer is returned. i.e.
``popCount(-30) == 28`` because -30 is::
1111 1111 1111 1111 1111 1111 1110 0010
Uses the algorithm from `HAKMEM item 169 <https://www.inwap.com/pdp10/hbaker/hakmem/hacks.html#item169>`_.
Args:
v (int): Value to count.
Returns:
Number of 1 bits in the binary representation of ``v``.
"""
if v > 0xFFFFFFFF:
return popCount(v >> 32) + popCount(v & 0xFFFFFFFF)
# HACKMEM 169
y = (v >> 1) & 0xDB6DB6DB
y = v - y - ((y >> 1) & 0xDB6DB6DB)
return (((y + (y >> 3)) & 0xC71C71C7) % 0x3F)
|
__all__ = ['popCount']
try:
bit_count = int.bit_count
except AttributeError:
def bit_count(v):
return bin(v).count('1')
"""Return number of 1 bits (population count) of the absolute value of an integer.
See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
"""
popCount = bit_count
|
Consolidate bit_count / popCount methods
|
Consolidate bit_count / popCount methods
Fixes https://github.com/fonttools/fonttools/issues/2331
|
Python
|
mit
|
googlefonts/fonttools,fonttools/fonttools
|
__all__ = ['popCount']
def popCount(v):
"""Return number of 1 bits (population count) of an integer.
If the integer is negative, the number of 1 bits in the
twos-complement representation of the integer is returned. i.e.
``popCount(-30) == 28`` because -30 is::
1111 1111 1111 1111 1111 1111 1110 0010
Uses the algorithm from `HAKMEM item 169 <https://www.inwap.com/pdp10/hbaker/hakmem/hacks.html#item169>`_.
Args:
v (int): Value to count.
Returns:
Number of 1 bits in the binary representation of ``v``.
"""
if v > 0xFFFFFFFF:
return popCount(v >> 32) + popCount(v & 0xFFFFFFFF)
# HACKMEM 169
y = (v >> 1) & 0xDB6DB6DB
y = v - y - ((y >> 1) & 0xDB6DB6DB)
return (((y + (y >> 3)) & 0xC71C71C7) % 0x3F)
Consolidate bit_count / popCount methods
Fixes https://github.com/fonttools/fonttools/issues/2331
|
__all__ = ['popCount']
try:
bit_count = int.bit_count
except AttributeError:
def bit_count(v):
return bin(v).count('1')
"""Return number of 1 bits (population count) of the absolute value of an integer.
See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
"""
popCount = bit_count
|
<commit_before>__all__ = ['popCount']
def popCount(v):
"""Return number of 1 bits (population count) of an integer.
If the integer is negative, the number of 1 bits in the
twos-complement representation of the integer is returned. i.e.
``popCount(-30) == 28`` because -30 is::
1111 1111 1111 1111 1111 1111 1110 0010
Uses the algorithm from `HAKMEM item 169 <https://www.inwap.com/pdp10/hbaker/hakmem/hacks.html#item169>`_.
Args:
v (int): Value to count.
Returns:
Number of 1 bits in the binary representation of ``v``.
"""
if v > 0xFFFFFFFF:
return popCount(v >> 32) + popCount(v & 0xFFFFFFFF)
# HACKMEM 169
y = (v >> 1) & 0xDB6DB6DB
y = v - y - ((y >> 1) & 0xDB6DB6DB)
return (((y + (y >> 3)) & 0xC71C71C7) % 0x3F)
<commit_msg>Consolidate bit_count / popCount methods
Fixes https://github.com/fonttools/fonttools/issues/2331<commit_after>
|
__all__ = ['popCount']
try:
bit_count = int.bit_count
except AttributeError:
def bit_count(v):
return bin(v).count('1')
"""Return number of 1 bits (population count) of the absolute value of an integer.
See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
"""
popCount = bit_count
|
__all__ = ['popCount']
def popCount(v):
"""Return number of 1 bits (population count) of an integer.
If the integer is negative, the number of 1 bits in the
twos-complement representation of the integer is returned. i.e.
``popCount(-30) == 28`` because -30 is::
1111 1111 1111 1111 1111 1111 1110 0010
Uses the algorithm from `HAKMEM item 169 <https://www.inwap.com/pdp10/hbaker/hakmem/hacks.html#item169>`_.
Args:
v (int): Value to count.
Returns:
Number of 1 bits in the binary representation of ``v``.
"""
if v > 0xFFFFFFFF:
return popCount(v >> 32) + popCount(v & 0xFFFFFFFF)
# HACKMEM 169
y = (v >> 1) & 0xDB6DB6DB
y = v - y - ((y >> 1) & 0xDB6DB6DB)
return (((y + (y >> 3)) & 0xC71C71C7) % 0x3F)
Consolidate bit_count / popCount methods
Fixes https://github.com/fonttools/fonttools/issues/2331__all__ = ['popCount']
try:
bit_count = int.bit_count
except AttributeError:
def bit_count(v):
return bin(v).count('1')
"""Return number of 1 bits (population count) of the absolute value of an integer.
See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
"""
popCount = bit_count
|
<commit_before>__all__ = ['popCount']
def popCount(v):
"""Return number of 1 bits (population count) of an integer.
If the integer is negative, the number of 1 bits in the
twos-complement representation of the integer is returned. i.e.
``popCount(-30) == 28`` because -30 is::
1111 1111 1111 1111 1111 1111 1110 0010
Uses the algorithm from `HAKMEM item 169 <https://www.inwap.com/pdp10/hbaker/hakmem/hacks.html#item169>`_.
Args:
v (int): Value to count.
Returns:
Number of 1 bits in the binary representation of ``v``.
"""
if v > 0xFFFFFFFF:
return popCount(v >> 32) + popCount(v & 0xFFFFFFFF)
# HACKMEM 169
y = (v >> 1) & 0xDB6DB6DB
y = v - y - ((y >> 1) & 0xDB6DB6DB)
return (((y + (y >> 3)) & 0xC71C71C7) % 0x3F)
<commit_msg>Consolidate bit_count / popCount methods
Fixes https://github.com/fonttools/fonttools/issues/2331<commit_after>__all__ = ['popCount']
try:
bit_count = int.bit_count
except AttributeError:
def bit_count(v):
return bin(v).count('1')
"""Return number of 1 bits (population count) of the absolute value of an integer.
See https://docs.python.org/3.10/library/stdtypes.html#int.bit_count
"""
popCount = bit_count
|
5cb2d684ac3a0f99153cf88649b1f9d5274e4c76
|
seo/escaped_fragment/app.py
|
seo/escaped_fragment/app.py
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output(["phantomjs",
"--load-images=false", "crawler.js", url])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
Enable disk cache for PhJS
|
Enable disk cache for PhJS
|
Python
|
apache-2.0
|
orgkhnargh/platformio-web,platformio/platformio-web,orgkhnargh/platformio-web,platformio/platformio-web,orgkhnargh/platformio-web
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output(["phantomjs",
"--load-images=false", "crawler.js", url])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
Enable disk cache for PhJS
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output(["phantomjs",
"--load-images=false", "crawler.js", url])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
<commit_msg>Enable disk cache for PhJS<commit_after>
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output(["phantomjs",
"--load-images=false", "crawler.js", url])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
Enable disk cache for PhJS# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output(["phantomjs",
"--load-images=false", "crawler.js", url])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
<commit_msg>Enable disk cache for PhJS<commit_after># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.ikravets.com/#!" + unquote(qs[19:])
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if "404 Not Found" in response:
status = "404 Not Found"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
|
ea887d44f7e66d48036ffa81d678311de3857271
|
jsonpickle/handlers.py
|
jsonpickle/handlers.py
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, type, handler):
"""
Register handler.
:Parameters:
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[type] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, cls, handler):
"""
Register handler.
:Parameters:
- `cls`: Object class
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[cls] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
Remove usage of built-in 'type' name
|
jsonpickle.handler: Remove usage of built-in 'type' name
'type' is a built-in function so use 'cls' instead of 'type'.
Signed-off-by: David Aguilar <9de348c050f7cd1ca590883733c4e531ce610bf4@gmail.com>
|
Python
|
bsd-3-clause
|
dongguangming/jsonpickle,eoghanmurray/jsonpickle_prev,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,mandx/jsonpickle,mandx/jsonpickle,dongguangming/jsonpickle,eoghanmurray/jsonpickle_prev
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, type, handler):
"""
Register handler.
:Parameters:
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[type] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
jsonpickle.handler: Remove usage of built-in 'type' name
'type' is a built-in function so use 'cls' instead of 'type'.
Signed-off-by: David Aguilar <9de348c050f7cd1ca590883733c4e531ce610bf4@gmail.com>
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, cls, handler):
"""
Register handler.
:Parameters:
- `cls`: Object class
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[cls] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
<commit_before>class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, type, handler):
"""
Register handler.
:Parameters:
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[type] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
<commit_msg>jsonpickle.handler: Remove usage of built-in 'type' name
'type' is a built-in function so use 'cls' instead of 'type'.
Signed-off-by: David Aguilar <9de348c050f7cd1ca590883733c4e531ce610bf4@gmail.com><commit_after>
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, cls, handler):
"""
Register handler.
:Parameters:
- `cls`: Object class
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[cls] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, type, handler):
"""
Register handler.
:Parameters:
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[type] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
jsonpickle.handler: Remove usage of built-in 'type' name
'type' is a built-in function so use 'cls' instead of 'type'.
Signed-off-by: David Aguilar <9de348c050f7cd1ca590883733c4e531ce610bf4@gmail.com>class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, cls, handler):
"""
Register handler.
:Parameters:
- `cls`: Object class
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[cls] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
<commit_before>class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, type, handler):
"""
Register handler.
:Parameters:
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[type] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
<commit_msg>jsonpickle.handler: Remove usage of built-in 'type' name
'type' is a built-in function so use 'cls' instead of 'type'.
Signed-off-by: David Aguilar <9de348c050f7cd1ca590883733c4e531ce610bf4@gmail.com><commit_after>class BaseHandler(object):
"""
Abstract base class for handlers.
"""
def __init__(self, base):
"""
Initialize a new handler to handle `type`.
:Parameters:
- `base`: reference to pickler/unpickler
"""
self._base = base
def flatten(self, obj, data):
"""
Flatten `obj` into a json-friendly form.
:Parameters:
- `obj`: object of `type`
"""
raise NotImplementedError("Abstract method.")
def restore(self, obj):
"""
Restores the `obj` to `type`
:Parameters:
- `object`: json-friendly object
"""
raise NotImplementedError("Abstract method.")
class Registry(object):
REGISTRY = {}
def register(self, cls, handler):
"""
Register handler.
:Parameters:
- `cls`: Object class
- `handler`: `BaseHandler` subclass
"""
self.REGISTRY[cls] = handler
return handler
def get(self, cls):
"""
Get the customer handler for `obj` (if any)
:Parameters:
- `cls`: class to handle
"""
return self.REGISTRY.get(cls, None)
registry = Registry()
|
adfaff320066422734c28759688f75e3f127078c
|
icekit/plugins/contact_person/models.py
|
icekit/plugins/contact_person/models.py
|
import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
Repair 500 viewing contact person
|
Repair 500 viewing contact person
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
Repair 500 viewing contact person
|
from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
<commit_before>import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
<commit_msg>Repair 500 viewing contact person<commit_after>
|
from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
Repair 500 viewing contact personfrom fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
<commit_before>import os
from django.core.urlresolvers import NoReverseMatch
from fluent_contents.models import ContentItem
from fluent_pages.urlresolvers import app_reverse, PageTypeNotMounted
from icekit.publishing.models import PublishingModel
from timezone import timezone
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from icekit.content_collections.abstract_models import AbstractCollectedContent, \
TitleSlugMixin, AbstractListingPage
from icekit.mixins import FluentFieldsMixin
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return "{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
<commit_msg>Repair 500 viewing contact person<commit_after>from fluent_contents.models import ContentItem
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class ContactPerson(models.Model):
name = models.CharField(max_length=255)
title = models.CharField(max_length=255, blank=True)
phone = models.CharField(max_length=255, blank=True)
email = models.EmailField(max_length=255, blank=True)
def __str__(self):
return u"{} ({})".format(self.name, self.title)
class Meta:
verbose_name_plural = "Contact people"
@python_2_unicode_compatible
class ContactPersonItem(ContentItem):
"""
A content item that links to a Press Contact.
"""
contact = models.ForeignKey(ContactPerson)
help_text = \
'A content plugin that allows you to add press contact information.'
class Meta:
verbose_name = _('Contact Person')
def __str__(self):
return str(self.contact)
|
79bf3254cc4690bb8c72f5214fd0c27ea8ff1c15
|
pypods/loc/locdatasource.py
|
pypods/loc/locdatasource.py
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
Correct imports in loc package.
|
Correct imports in loc package.
|
Python
|
epl-1.0
|
dls-controls/PyPODS
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")Correct imports in loc package.
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
<commit_before>import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")<commit_msg>Correct imports in loc package.<commit_after>
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")Correct imports in loc package.import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
<commit_before>import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")<commit_msg>Correct imports in loc package.<commit_after>import re
from pypods.datasource import DataSource
from pypods.loc.locchannelhandler import LocChannelHandler
class LocDataSource(DataSource):
def __init__(self):
super(LocDataSource, self).__init__()
self.channels = dict()
def create_channel(self, channel_name):
"""Creates a channel handler for the given name"""
n, v = self.parse_name(channel_name)
# TODO: create and return channelhandler
if n in self.channels.keys():
self.channels[n].set_initial_value(v)
else:
newchan = LocChannelHandler(n)
newchan.set_initial_value(v)
self.channels[n] = newchan
return self.channels[n]
def parse_name(self, name):
# Name should be of format like test31(3)
m = re.match("(.+)\((.+)\)", name)
if m is not None:
return m.groups()
else:
raise Exception("Name format is invalid")
if __name__ == "__main__":
l = LocDataSource()
l.create_channel("test(5)")
|
92676c0e84df0e1c0d14766b339410d09c60b5fb
|
froide/helper/forms.py
|
froide/helper/forms.py
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
Make empty tag form valid
|
Make empty tag form valid
|
Python
|
mit
|
stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
Make empty tag form valid
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
<commit_msg>Make empty tag form valid<commit_after>
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
Make empty tag form validfrom django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
<commit_before>from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
<commit_msg>Make empty tag form valid<commit_after>from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
from taggit.forms import TagField
from taggit.utils import edit_string_for_tags
from .widgets import TagAutocompleteWidget
class TagObjectForm(forms.Form):
def __init__(self, *args, **kwargs):
tags = kwargs.pop('tags', [])
if tags:
kwargs['initial'] = {'tags': edit_string_for_tags(tags)}
autocomplete_url = kwargs.pop('autocomplete_url', None)
if autocomplete_url is not None:
self.tags_autocomplete_url = autocomplete_url
super(TagObjectForm, self).__init__(*args, **kwargs)
self.fields['tags'] = TagField(
label=_("Tags"),
widget=TagAutocompleteWidget(
attrs={'placeholder': _('Tags')},
autocomplete_url=self.tags_autocomplete_url
),
required=False,
help_text=_("Comma separated and quoted")
)
def save(self, obj):
obj.tags.set(*self.cleaned_data['tags'])
obj.save()
def get_fk_form_class(model, field, admin_site, queryset=None):
remote_field = model._meta.get_field(field).remote_field
if queryset is None:
queryset = remote_field.model.objects.all()
widget = ForeignKeyRawIdWidget(remote_field, admin_site)
class ForeignKeyForm(forms.Form):
obj = forms.ModelChoiceField(queryset=queryset,
widget=widget)
return ForeignKeyForm
|
ea20f912696974a2543a8fa15f63f0a3b64d7263
|
froide/helper/utils.py
|
froide/helper/utils.py
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
Add utility function to get client IP from request
|
Add utility function to get client IP from request
|
Python
|
mit
|
ryankanno/froide,fin/froide,CodeforHawaii/froide,fin/froide,catcosmo/froide,LilithWittmann/froide,okfse/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,fin/froide,stefanw/froide,catcosmo/froide,okfse/froide,CodeforHawaii/froide,ryankanno/froide,stefanw/froide,catcosmo/froide,LilithWittmann/froide,fin/froide,catcosmo/froide,okfse/froide,catcosmo/froide,ryankanno/froide,okfse/froide,stefanw/froide,CodeforHawaii/froide,CodeforHawaii/froide,LilithWittmann/froide,CodeforHawaii/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
Add utility function to get client IP from request
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
<commit_before>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
<commit_msg>Add utility function to get client IP from request<commit_after>
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
Add utility function to get client IP from requestfrom django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
<commit_before>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
<commit_msg>Add utility function to get client IP from request<commit_after>from django.shortcuts import render
def get_next(request):
# This is not a view
return request.GET.get("next", request.META.get("HTTP_REFERER", "/"))
def render_code(code, request, context={}):
return render(request, "%d.html" % code, context,
status=code)
def render_400(request):
return render_code(400, request)
def render_405(request):
return render_code(405, request)
def render_403(request, message=''):
return render_code(403, request,
context={"message": message})
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[-1].strip()
else:
ip = request.META.get('REMOTE_ADDR')
return ip
|
e0385d0ba8fab48f129175123e103544574d1dac
|
commands.py
|
commands.py
|
#!/usr/bin/env python
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
Remove shebang line from non-script.
|
Remove shebang line from non-script.
|
Python
|
mit
|
dripton/ampchat
|
#!/usr/bin/env python
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
Remove shebang line from non-script.
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
<commit_before>#!/usr/bin/env python
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
<commit_msg>Remove shebang line from non-script.<commit_after>
|
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
#!/usr/bin/env python
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
Remove shebang line from non-script.from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
<commit_before>#!/usr/bin/env python
from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
<commit_msg>Remove shebang line from non-script.<commit_after>from twisted.protocols import amp
from twisted.cred.error import UnauthorizedLogin
# commands to server side
class Login(amp.Command):
arguments = [("username", amp.String()), ("password", amp.String())]
response = []
errors = {UnauthorizedLogin: "UnauthorizedLogin"}
# If we set requiresAnswer = False, then the client-side callRemote
# returns None instead of a deferred, and we can't attach callbacks.
# So be sure to return an empty dict instead.
# TODO doc patch for twisted
class SendToAll(amp.Command):
arguments = [("message", amp.String())]
response = []
class SendToUser(amp.Command):
arguments = [("message", amp.String()), "username", amp.String()]
response = []
# commands to client side
class Send(amp.Command):
arguments = [("message", amp.String()), ("sender", amp.String())]
response = []
class AddUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class DelUser(amp.Command):
arguments = [("user", amp.String())]
response = []
class LoggedIn(amp.Command):
arguments = [("ok", amp.Boolean())]
response = []
|
e981581e9bc1b4ac4410204dab5f7f71d1dcac79
|
readthedocs/profiles/urls.py
|
readthedocs/profiles/urls.py
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>\w+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>[\w-.]+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
Allow periods and dashes in profile page
|
Allow periods and dashes in profile page
|
Python
|
mit
|
dirn/readthedocs.org,nikolas/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,rtfd/readthedocs.org,nikolas/readthedocs.org,kenshinthebattosai/readthedocs.org,gjtorikian/readthedocs.org,davidfischer/readthedocs.org,takluyver/readthedocs.org,wanghaven/readthedocs.org,sid-kap/readthedocs.org,hach-que/readthedocs.org,hach-que/readthedocs.org,agjohnson/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,espdev/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,CedarLogic/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,KamranMackey/readthedocs.org,tddv/readthedocs.org,soulshake/readthedocs.org,takluyver/readthedocs.org,espdev/readthedocs.org,wanghaven/readthedocs.org,SteveViss/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,wijerasa/readthedocs.org,kdkeyser/readthedocs.org,titiushko/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,tddv/readthedocs.org,CedarLogic/readthedocs.org,sils1297/readthedocs.org,royalwang/readthedocs.org,takluyver/readthedocs.org,raven47git/readthedocs.org,singingwolfboy/readthedocs.org,titiushko/readthedocs.org,Carreau/readthedocs.org,agjohnson/readthedocs.org,fujita-shintaro/readthedocs.org,dirn/readthedocs.org,singingwolfboy/readthedocs.org,safwanrahman/readthedocs.org,atsuyim/readthedocs.org,pombredanne/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,michaelmcandrew/readthedocs.org,d0ugal/readthedocs.org,michaelmcandrew/readthedocs.org,stevepiercy/readthedocs.org,kenwang76/readthedocs.org,emawind84/readthedocs.org,Carreau/readthedocs.org,davidfischer/readthedocs.org,mhils/readthedocs.org,safwanrahman/readthedocs.org,atsuyim/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,sils1297/readthedocs.org,kdkeyser/readthedocs.org,KamranMackey/readthe
docs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,gjtorikian/readthedocs.org,Carreau/readthedocs.org,kenwang76/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,stevepiercy/readthedocs.org,soulshake/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,laplaceliu/readthedocs.org,singingwolfboy/readthedocs.org,jerel/readthedocs.org,asampat3090/readthedocs.org,KamranMackey/readthedocs.org,mhils/readthedocs.org,sunnyzwh/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,asampat3090/readthedocs.org,VishvajitP/readthedocs.org,KamranMackey/readthedocs.org,royalwang/readthedocs.org,jerel/readthedocs.org,emawind84/readthedocs.org,LukasBoersma/readthedocs.org,cgourlay/readthedocs.org,SteveViss/readthedocs.org,Tazer/readthedocs.org,fujita-shintaro/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,fujita-shintaro/readthedocs.org,atsuyim/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,sils1297/readthedocs.org,raven47git/readthedocs.org,mrshoki/readthedocs.org,techtonik/readthedocs.org,cgourlay/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,VishvajitP/readthedocs.org,techtonik/readthedocs.org,agjohnson/readthedocs.org,sid-kap/readthedocs.org,Tazer/readthedocs.org,nikolas/readthedocs.org,Tazer/readthedocs.org,wijerasa/readthedocs.org,raven47git/readthedocs.org,kdkeyser/readthedocs.org,clarkperkins/readthedocs.org,wijerasa/readthedocs.org,titiushko/readthedocs.org,techtonik/readthedocs.org,hach-que/readthedocs.org,cgourlay/readthedocs.org,davidfischer/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,VishvajitP/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,VishvajitP/readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs
.org,techtonik/readthedocs.org,mrshoki/readthedocs.org,SteveViss/readthedocs.org,d0ugal/readthedocs.org,fujita-shintaro/readthedocs.org,wanghaven/readthedocs.org,attakei/readthedocs-oauth,GovReady/readthedocs.org,wanghaven/readthedocs.org,kdkeyser/readthedocs.org,kenwang76/readthedocs.org,istresearch/readthedocs.org,espdev/readthedocs.org,Carreau/readthedocs.org,mhils/readthedocs.org,Tazer/readthedocs.org,atsuyim/readthedocs.org,jerel/readthedocs.org,istresearch/readthedocs.org,asampat3090/readthedocs.org,jerel/readthedocs.org,LukasBoersma/readthedocs.org,GovReady/readthedocs.org,istresearch/readthedocs.org,LukasBoersma/readthedocs.org,soulshake/readthedocs.org,asampat3090/readthedocs.org,safwanrahman/readthedocs.org,attakei/readthedocs-oauth,royalwang/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,takluyver/readthedocs.org,michaelmcandrew/readthedocs.org,rtfd/readthedocs.org,raven47git/readthedocs.org,laplaceliu/readthedocs.org,dirn/readthedocs.org,clarkperkins/readthedocs.org,sunnyzwh/readthedocs.org,mhils/readthedocs.org,sid-kap/readthedocs.org
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>\w+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
Allow periods and dashes in profile page
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>[\w-.]+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
<commit_before>"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>\w+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
<commit_msg>Allow periods and dashes in profile page<commit_after>
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>[\w-.]+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>\w+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
Allow periods and dashes in profile page"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>[\w-.]+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
<commit_before>"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>\w+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
<commit_msg>Allow periods and dashes in profile page<commit_after>"""
URLConf for Django user profile management.
Recommended usage is to use a call to ``include()`` in your project's
root URLConf to include this URLConf for any URL beginning with
'/profiles/'.
If the default behavior of the profile views is acceptable to you,
simply use a line like this in your root URLConf to set up the default
URLs for profiles::
(r'^profiles/', include('profiles.urls')),
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to keep the name ``profiles_profile_detail`` for the pattern
which points to the ``profile_detail`` view, since several views use
``reverse()`` with that name to generate a default post-submission
redirect. If you don't use that name, remember to explicitly pass
``success_url`` to those views.
"""
from django.conf.urls import *
from profiles import views
urlpatterns = patterns('',
url(r'^create/$',
views.create_profile,
name='profiles_create_profile'),
url(r'^edit/$',
views.edit_profile,
name='profiles_edit_profile'),
url(r'^(?P<username>[\w-.]+)/$',
views.profile_detail,
name='profiles_profile_detail'),
url(r'^$',
views.ProfileListView.as_view(),
name='profiles_profile_list'),
)
|
486156f344af66fa762e6321d52e26b40c734e38
|
login.py
|
login.py
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env if running locally
if os.environ.get('HEROKU') is None:
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
Add check for Heroku before .env import
|
Add check for Heroku before .env import
Heroku was rightfully breaking when loadenv() was called as it already
had the proper environment variables. Add a check for Heroku before
loading the variables.
|
Python
|
mit
|
Jacobinski/SaltBot
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, requestAdd check for Heroku before .env import
Heroku was rightfully breaking when loadenv() was called as it already
had the proper environment variables. Add a check for Heroku before
loading the variables.
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env if running locally
if os.environ.get('HEROKU') is None:
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
<commit_before>'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request<commit_msg>Add check for Heroku before .env import
Heroku was rightfully breaking when loadenv() was called as it already
had the proper environment variables. Add a check for Heroku before
loading the variables.<commit_after>
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env if running locally
if os.environ.get('HEROKU') is None:
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, requestAdd check for Heroku before .env import
Heroku was rightfully breaking when loadenv() was called as it already
had the proper environment variables. Add a check for Heroku before
loading the variables.'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env if running locally
if os.environ.get('HEROKU') is None:
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
<commit_before>'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request<commit_msg>Add check for Heroku before .env import
Heroku was rightfully breaking when loadenv() was called as it already
had the proper environment variables. Add a check for Heroku before
loading the variables.<commit_after>'''
The user login module for SaltBot
'''
import requests
import os
from dotenv import load_dotenv, find_dotenv
URL_SIGNIN = 'https://www.saltybet.com/authenticate?signin=1'
def saltbot_login():
# Default the return values to None
session = None
request = None
# Start a session so we can have persistant cookies
session = requests.session()
# Obtain login specifics from .env if running locally
if os.environ.get('HEROKU') is None:
load_dotenv(find_dotenv())
# This is the form data that the page sends when logging in
login_data = {
'email': os.environ.get('EMAIL'),
'pword': os.environ.get('PASSWORD'),
'authenticate': 'signin'
}
# Authenticate
request = session.post(URL_SIGNIN, data=login_data)
# Check for successful login & redirect
if request.url != "https://www.saltybet.com/" and request.url != "http://www.saltybet.com/":
print("Error: Wrong URL: " + request.url)
return session, request
|
638e9761a6a42a8ab9d8eb7996b0a19d394ad3ea
|
precision/accounts/urls.py
|
precision/accounts/urls.py
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
]
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login, password_change, password_change_done
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
# Password Change
# ===============
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
url(
regex=r'^password-change/$',
view=password_change,
name='password_change'
),
url(
regex=r'^password-change/done/$',
view=password_change_done,
name='password_change_done'
),
]
|
Add password change url patterns
|
Add password change url patterns
|
Python
|
mit
|
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
]
Add password change url patterns
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login, password_change, password_change_done
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
# Password Change
# ===============
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
url(
regex=r'^password-change/$',
view=password_change,
name='password_change'
),
url(
regex=r'^password-change/done/$',
view=password_change_done,
name='password_change_done'
),
]
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
]
<commit_msg>Add password change url patterns<commit_after>
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login, password_change, password_change_done
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
# Password Change
# ===============
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
url(
regex=r'^password-change/$',
view=password_change,
name='password_change'
),
url(
regex=r'^password-change/done/$',
view=password_change_done,
name='password_change_done'
),
]
|
from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
]
Add password change url patternsfrom django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login, password_change, password_change_done
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
# Password Change
# ===============
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
url(
regex=r'^password-change/$',
view=password_change,
name='password_change'
),
url(
regex=r'^password-change/done/$',
view=password_change_done,
name='password_change_done'
),
]
|
<commit_before>from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
]
<commit_msg>Add password change url patterns<commit_after>from django.conf.urls import url
from django.contrib.auth.views import login, logout, logout_then_login, password_change, password_change_done
from .views import SignInView
urlpatterns = [
# Authentication
# ==============
url(
regex=r'^login/$',
view=login,
name='login'
),
url(
regex=r'^logout/$',
view=logout,
name='logout'
),
# Password Change
# ===============
url(
regex=r'^logout-then-login/$',
view=logout_then_login,
name='logout_then_login'
),
url(
regex=r'^password-change/$',
view=password_change,
name='password_change'
),
url(
regex=r'^password-change/done/$',
view=password_change_done,
name='password_change_done'
),
]
|
e15029d051bbfa12dd8c01709e94e6b731b243e1
|
djangopress/tests/test_templatetags.py
|
djangopress/tests/test_templatetags.py
|
"""Test djangopress templatetags."""
from django.template import Template, Context
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
|
"""Test djangopress templatetags."""
from django.template import Template, Context
from djangopress.templatetags.djangopress import archive_list
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
def test_archive_list_dictionary():
"""Test the dictionary of archive list."""
dictionary = archive_list()
assert dictionary == {}
|
Test archive_list returns a dictionary
|
Test archive_list returns a dictionary
|
Python
|
mit
|
gilmrjc/djangopress,gilmrjc/djangopress,gilmrjc/djangopress
|
"""Test djangopress templatetags."""
from django.template import Template, Context
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
Test archive_list returns a dictionary
|
"""Test djangopress templatetags."""
from django.template import Template, Context
from djangopress.templatetags.djangopress import archive_list
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
def test_archive_list_dictionary():
"""Test the dictionary of archive list."""
dictionary = archive_list()
assert dictionary == {}
|
<commit_before>"""Test djangopress templatetags."""
from django.template import Template, Context
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
<commit_msg>Test archive_list returns a dictionary<commit_after>
|
"""Test djangopress templatetags."""
from django.template import Template, Context
from djangopress.templatetags.djangopress import archive_list
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
def test_archive_list_dictionary():
"""Test the dictionary of archive list."""
dictionary = archive_list()
assert dictionary == {}
|
"""Test djangopress templatetags."""
from django.template import Template, Context
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
Test archive_list returns a dictionary"""Test djangopress templatetags."""
from django.template import Template, Context
from djangopress.templatetags.djangopress import archive_list
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
def test_archive_list_dictionary():
"""Test the dictionary of archive list."""
dictionary = archive_list()
assert dictionary == {}
|
<commit_before>"""Test djangopress templatetags."""
from django.template import Template, Context
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
<commit_msg>Test archive_list returns a dictionary<commit_after>"""Test djangopress templatetags."""
from django.template import Template, Context
from djangopress.templatetags.djangopress import archive_list
def test_archive_list_tag():
"""Test the archive_list tag."""
template_snippet = '{% load djangopress %}{% archive_list %}'
Template(template_snippet).render(Context({}))
def test_archive_list_dictionary():
"""Test the dictionary of archive list."""
dictionary = archive_list()
assert dictionary == {}
|
30fa7e0137b2afae8a4b1de01fcde14bc9e7e910
|
iktomi/web/shortcuts.py
|
iktomi/web/shortcuts.py
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name, convs=convs)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
Fix Rule: pass convs argument to match
|
Fix Rule: pass convs argument to match
|
Python
|
mit
|
boltnev/iktomi,oas89/iktomi,Lehych/iktomi,SmartTeleMax/iktomi,boltnev/iktomi,SlivTime/iktomi,SlivTime/iktomi,Lehych/iktomi,oas89/iktomi,oas89/iktomi,SlivTime/iktomi,Lehych/iktomi,SmartTeleMax/iktomi,boltnev/iktomi,SmartTeleMax/iktomi
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
Fix Rule: pass convs argument to match
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name, convs=convs)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
<commit_before># -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
<commit_msg>Fix Rule: pass convs argument to match<commit_after>
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name, convs=convs)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
Fix Rule: pass convs argument to match# -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name, convs=convs)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
<commit_before># -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
<commit_msg>Fix Rule: pass convs argument to match<commit_after># -*- coding: utf-8 -*-
import json
from webob.exc import status_map
from webob import Response
from .core import cases
from . import filters
__all__ = ['redirect_to', 'http_error', 'to_json', 'Rule']
def redirect_to(endpoint, _code=303, qs=None, **kwargs):
def handle(env, data):
url = env.root.build_url(endpoint, **kwargs)
if qs is not None:
url = url.qs_set(qs)
raise status_map[_code](location=str(url))
return handle
def http_error(_code, **kwargs):
def handle(env, data):
raise status_map[_code](**kwargs)
return handle
def to_json(data):
return Response(json.dumps(data))
def Rule(path, handler, method=None, name=None, convs=None):
# werkzeug-style Rule
if name is None:
name = handler.func_name
h = filters.match(path, name, convs=convs)
if method is not None:
h = h | cases(filters.method(method),
http_error(405))
return h | handler
|
c7e65db27da59ddf221d1720362434581ef30311
|
test/unit/locale/test_locale.py
|
test/unit/locale/test_locale.py
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
|
Make test_translations test our tree
|
Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24
|
Python
|
apache-2.0
|
swiftstack/swift,rackerlabs/swift,zackmdavis/swift,williamthegrey/swift,eatbyte/Swift,matthewoliver/swift,Seagate/swift,anishnarang/gswift,clayg/swift,shibaniahegde/OpenStak_swift,openstack/swift,prashanthpai/swift,matthewoliver/swift,psachin/swift,nadeemsyed/swift,AfonsoFGarcia/swift,prashanthpai/swift,smerritt/swift,levythu/swift,hbhdytf/mac,mjzmjz/swift,Khushbu27/Tutorial,redhat-openstack/swift,Seagate/swift,takeshineshiro/swift,notmyname/swift,dpgoetz/swift,nadeemsyed/swift,matthewoliver/swift,tipabu/swift,tipabu/swift,bkolli/swift,gold3bear/swift,mjwtom/swift,swiftstack/swift,IPVL/swift-kilo,Em-Pan/swift,hbhdytf/mac,scality/ScalitySproxydSwift,thiagodasilva/swift,Khushbu27/Tutorial,xiaoguoai/ec-dev-swift,nadeemsyed/swift,williamthegrey/swift,openstack/swift,sarvesh-ranjan/swift,anishnarang/gswift,smerritt/swift,hurricanerix/swift,mjwtom/swift,bradleypj823/swift,dpgoetz/swift,eatbyte/Swift,Akanoa/swift,clayg/swift,gold3bear/swift,psachin/swift,swiftstack/swift,hbhdytf/mac2,openstack/swift,psachin/swift,wenhuizhang/swift,hurricanerix/swift,hbhdytf/mac2,wenhuizhang/swift,aerwin3/swift,maginatics/swift,NeCTAR-RC/swift,bkolli/swift,sarvesh-ranjan/swift,bradleypj823/swift,daasbank/swift,revoer/keystone-8.0.0,smerritt/swift,notmyname/swift,shibaniahegde/OpenStak_swift,NeCTAR-RC/swift,Em-Pan/swift,psachin/swift,openstack/swift,thiagodasilva/swift,notmyname/swift,iostackproject/IO-Bandwidth-Differentiation,maginatics/swift,tipabu/swift,larsbutler/swift,hurricanerix/swift,bouncestorage/swift,redbo/swift,AfonsoFGarcia/swift,notmyname/swift,clayg/swift,revoer/keystone-8.0.0,iostackproject/IO-Bandwidth-Differentiation,bouncestorage/swift,xiaoguoai/ec-dev-swift,hbhdytf/mac2,redhat-openstack/swift,hbhdytf/mac2,dencaval/swift,levythu/swift,nadeemsyed/swift,takeshineshiro/swift,mjzmjz/swift,scality/ScalitySproxydSwift,larsbutler/swift,zackmdavis/swift,dencaval/swift,daasbank/swift,matthewoliver/swift,aerwin3/swift,rackerlabs/swift,IPVL/swift-kilo,smerritt/swift,Akanoa/swift,cla
yg/swift,redbo/swift,tipabu/swift,hurricanerix/swift
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
|
<commit_before>#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
<commit_msg>Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24<commit_after>
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
|
<commit_before>#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
from swift import gettext_ as _
class TestTranslations(unittest.TestCase):
def test_translations(self):
translated_message = check_output(['python', __file__])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
print _('test message')
<commit_msg>Make test_translations test our tree
In order to run the correct classes, Python test framework adjusts
sys.path. However, these changes are not propagated to subprocesses.
Therefore, the test actually tries to test installed Swift, not
the one in which it is running.
The usual suggestion is to run "python setup.py develop" before
testing, but it's annoying and error-prone. If you forget it,
you may test the code in /usr very easily, and never know.
Let's just pass the correct path to subprocess. Much safer.
Change-Id: Ic71314e8462cf6e0579d704ffe9fbbfac7e6ba24<commit_after>#!/usr/bin/env python
#-*- coding:utf-8 -*-
import os
import unittest
import string
import sys
try:
from subprocess import check_output
except ImportError:
from subprocess import Popen, PIPE, CalledProcessError
def check_output(*popenargs, **kwargs):
"""Lifted from python 2.7 stdlib."""
if 'stdout' in kwargs:
raise ValueError('stdout argument not allowed, it will be '
'overridden.')
process = Popen(stdout=PIPE, *popenargs, **kwargs)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise CalledProcessError(retcode, cmd, output=output)
return output
os.environ['LC_ALL'] = 'eo'
os.environ['SWIFT_LOCALEDIR'] = os.path.dirname(__file__)
class TestTranslations(unittest.TestCase):
def test_translations(self):
path = ':'.join(sys.path)
translated_message = check_output(['python', __file__, path])
self.assertEquals(translated_message, 'testo mesaĝon\n')
if __name__ == "__main__":
sys.path = string.split(sys.argv[1], ':')
from swift import gettext_ as _
print _('test message')
|
858c61a5d23685b62e590d28c896002291817bb1
|
pygotham/admin/schedule.py
|
pygotham/admin/schedule.py
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
Change admin columns for slots
|
Change admin columns for slots
|
Python
|
bsd-3-clause
|
pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,djds23/pygotham-1,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,PyGotham/pygotham,pathunstrom/pygotham,pathunstrom/pygotham
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
Change admin columns for slots
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
<commit_before>"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
<commit_msg>Change admin columns for slots<commit_after>
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
Change admin columns for slots"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
<commit_before>"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
<commit_msg>Change admin columns for slots<commit_after>"""Admin for schedule-related models."""
from pygotham.admin.utils import model_view
from pygotham.schedule import models
# This line is really long because pep257 needs it to be on one line.
__all__ = ('DayModelView', 'RoomModelView', 'SlotModelView', 'PresentationModelView')
CATEGORY = 'Schedule'
DayModelView = model_view(
models.Day,
'Days',
CATEGORY,
column_default_sort='date',
column_list=('date', 'event'),
form_columns=('event', 'date'),
)
RoomModelView = model_view(
models.Room,
'Rooms',
CATEGORY,
column_default_sort='order',
form_columns=('name', 'order'),
)
SlotModelView = model_view(
models.Slot,
'Slots',
CATEGORY,
column_list=('day', 'rooms', 'kind', 'start', 'end'),
form_columns=('day', 'rooms', 'kind', 'start', 'end', 'content_override'),
)
PresentationModelView = model_view(
models.Presentation,
'Presentations',
CATEGORY,
)
|
b1a15ffe1c5f916076ac107735baf76e1da23bea
|
aiopg/__init__.py
|
aiopg/__init__.py
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
Add version and version_info to exported public API
|
Add version and version_info to exported public API
|
Python
|
bsd-2-clause
|
eirnym/aiopg,nerandell/aiopg,hyzhak/aiopg,aio-libs/aiopg,luhn/aiopg,graingert/aiopg
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
Add version and version_info to exported public API
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
<commit_before>import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
<commit_msg>Add version and version_info to exported public API<commit_after>
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
Add version and version_info to exported public APIimport re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
<commit_before>import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
<commit_msg>Add version and version_info to exported public API<commit_after>import re
import sys
from collections import namedtuple
from .connection import connect, Connection
from .cursor import Cursor
from .pool import create_pool, Pool
__all__ = ('connect', 'create_pool', 'Connection', 'Cursor', 'Pool',
'version', 'version_info')
__version__ = '0.3.0'
version = __version__ + ' , Python ' + sys.version
VersionInfo = namedtuple('VersionInfo',
'major minor micro releaselevel serial')
def _parse_version(ver):
RE = (r'^(?P<major>\d+)\.(?P<minor>\d+)\.'
'(?P<micro>\d+)((?P<releaselevel>[a-z]+)(?P<serial>\d+)?)?$')
match = re.match(RE, ver)
try:
major = int(match.group('major'))
minor = int(match.group('minor'))
micro = int(match.group('micro'))
levels = {'rc': 'candidate',
'a': 'alpha',
'b': 'beta',
None: 'final'}
releaselevel = levels[match.group('releaselevel')]
serial = int(match.group('serial')) if match.group('serial') else 0
return VersionInfo(major, minor, micro, releaselevel, serial)
except Exception:
raise ImportError("Invalid package version {}".format(ver))
version_info = _parse_version(__version__)
# make pyflakes happy
(connect, create_pool, Connection, Cursor, Pool)
|
39b5378b0d52e226c410671a47934a02d18f678e
|
scripts/extract_pivots_from_model.py
|
scripts/extract_pivots_from_model.py
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_tm import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_dual_domain import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
Fix import for new script location.
|
Fix import for new script location.
|
Python
|
apache-2.0
|
tmills/uda,tmills/uda
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_tm import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
Fix import for new script location.
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_dual_domain import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_tm import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Fix import for new script location.<commit_after>
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_dual_domain import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_tm import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
Fix import for new script location.#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_dual_domain import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_tm import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
<commit_msg>Fix import for new script location.<commit_after>#!/usr/bin/env python
import sys
import numpy as np
import torch
from learn_pivots_dual_domain import PivotLearnerModel, StraightThroughLayer
def main(args):
if len(args) < 1:
sys.stderr.write("Required arguments: <model file> [num pivots (100)]\n")
sys.exit(-1)
num_pivots = 100
if len(args) > 1:
num_pivots = int(args[1])
model = torch.load(args[0])
vec = np.abs(model.feature.input_layer.vector.data.cpu().numpy())
inds = np.argsort(vec)
pivot_inds = inds[0, -num_pivots:]
pivot_inds.sort()
for x in pivot_inds:
print(x)
if __name__ == '__main__':
main(sys.argv[1:])
|
8f45a3c0b5e619009984946606ea6cffec49d79d
|
server/mlabns/tests/test_resolver.py
|
server/mlabns/tests/test_resolver.py
|
import gflags
import unittest2
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(message.POLICY_GEO, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(message.ADDRESS_FAMILY_IPv4, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
import gflags
import unittest2
from mlabns.util import constants
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(None, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(None, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(constants.GEOLOCATION_APP_ENGINE, query.geolocation_type)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
Update resolver tests to match new resolver
|
Update resolver tests to match new resolver
|
Python
|
apache-2.0
|
m-lab/mlab-ns,fernandalavalle/mlab-ns,fernandalavalle/mlab-ns,m-lab/mlab-ns,m-lab/mlab-ns,fernandalavalle/mlab-ns,fernandalavalle/mlab-ns,m-lab/mlab-ns
|
import gflags
import unittest2
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(message.POLICY_GEO, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(message.ADDRESS_FAMILY_IPv4, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
Update resolver tests to match new resolver
|
import gflags
import unittest2
from mlabns.util import constants
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(None, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(None, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(constants.GEOLOCATION_APP_ENGINE, query.geolocation_type)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
<commit_before>import gflags
import unittest2
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(message.POLICY_GEO, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(message.ADDRESS_FAMILY_IPv4, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
<commit_msg>Update resolver tests to match new resolver<commit_after>
|
import gflags
import unittest2
from mlabns.util import constants
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(None, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(None, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(constants.GEOLOCATION_APP_ENGINE, query.geolocation_type)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
import gflags
import unittest2
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(message.POLICY_GEO, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(message.ADDRESS_FAMILY_IPv4, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
Update resolver tests to match new resolverimport gflags
import unittest2
from mlabns.util import constants
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(None, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(None, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(constants.GEOLOCATION_APP_ENGINE, query.geolocation_type)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
<commit_before>import gflags
import unittest2
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(message.POLICY_GEO, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(message.ADDRESS_FAMILY_IPv4, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
<commit_msg>Update resolver tests to match new resolver<commit_after>import gflags
import unittest2
from mlabns.util import constants
from mlabns.util import message
from mlabns.util import resolver
class ResolverTestCase(unittest2.TestCase):
def testDefaultConstructor(self):
query = resolver.LookupQuery();
self.assertEqual(None, query.tool_id)
self.assertEqual(None, query.policy)
self.assertEqual(None, query.metro)
self.assertEqual(None, query.ip_address)
self.assertEqual(None, query.address_family)
self.assertEqual(None, query.city)
self.assertEqual(None, query.country)
self.assertEqual(None, query.latitude)
self.assertEqual(None, query.longitude)
self.assertEqual(constants.GEOLOCATION_APP_ENGINE, query.geolocation_type)
self.assertEqual(None, query.response_format)
def testInitializeFromDictionary(self):
# TODO
pass
if __name__ == '__main__':
unittest2.main()
|
0173d18e2c88f4b944b3b12df2259fb0d26fee1d
|
drogher/shippers/dhl.py
|
drogher/shippers/dhl.py
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
@property
def valid_checksum(self):
sequence, check_digit = self.tracking_number[:-1], self.tracking_number[-1]
return int(sequence) % 7 == int(check_digit)
|
Add DHL waybill checksum validation
|
Add DHL waybill checksum validation
|
Python
|
bsd-3-clause
|
jbittel/drogher
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
Add DHL waybill checksum validation
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
@property
def valid_checksum(self):
sequence, check_digit = self.tracking_number[:-1], self.tracking_number[-1]
return int(sequence) % 7 == int(check_digit)
|
<commit_before>from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
<commit_msg>Add DHL waybill checksum validation<commit_after>
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
@property
def valid_checksum(self):
sequence, check_digit = self.tracking_number[:-1], self.tracking_number[-1]
return int(sequence) % 7 == int(check_digit)
|
from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
Add DHL waybill checksum validationfrom .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
@property
def valid_checksum(self):
sequence, check_digit = self.tracking_number[:-1], self.tracking_number[-1]
return int(sequence) % 7 == int(check_digit)
|
<commit_before>from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
<commit_msg>Add DHL waybill checksum validation<commit_after>from .base import Shipper
class DHL(Shipper):
barcode_pattern = r'^\d{10}$'
shipper = 'DHL'
@property
def valid_checksum(self):
sequence, check_digit = self.tracking_number[:-1], self.tracking_number[-1]
return int(sequence) % 7 == int(check_digit)
|
a843a423e3383661a3a47da49389dbf7ec59d196
|
tests/MenderAPI/__init__.py
|
tests/MenderAPI/__init__.py
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "0.1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "v1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
Update integrarion tests to use new versioning schema.
|
MEN-963: Update integrarion tests to use new versioning schema.
Signed-off-by: Maciej Mrowiec <0c5b7bd73e5de492f16f5039ef275802502d5ce2@gmail.com>
|
Python
|
apache-2.0
|
pasinskim/integration,GregorioDiStefano/integration,GregorioDiStefano/integration,pasinskim/integration,pasinskim/integration
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "0.1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
MEN-963: Update integrarion tests to use new versioning schema.
Signed-off-by: Maciej Mrowiec <0c5b7bd73e5de492f16f5039ef275802502d5ce2@gmail.com>
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "v1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
<commit_before>import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "0.1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
<commit_msg>MEN-963: Update integrarion tests to use new versioning schema.
Signed-off-by: Maciej Mrowiec <0c5b7bd73e5de492f16f5039ef275802502d5ce2@gmail.com><commit_after>
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "v1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "0.1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
MEN-963: Update integrarion tests to use new versioning schema.
Signed-off-by: Maciej Mrowiec <0c5b7bd73e5de492f16f5039ef275802502d5ce2@gmail.com>import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "v1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
<commit_before>import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "0.1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
<commit_msg>MEN-963: Update integrarion tests to use new versioning schema.
Signed-off-by: Maciej Mrowiec <0c5b7bd73e5de492f16f5039ef275802502d5ce2@gmail.com><commit_after>import os
import logging
api_version = os.getenv("MENDER_API_VERSION", "v1")
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
#logging.getLogger("paramiko").setLevel(logging.DEBUG)
logging.info("Setting api_version as: " + api_version)
import authentication
import admission
import deployments
import artifacts
import inventory
auth = authentication.Authentication()
adm = admission.Admission(auth)
deploy = deployments.Deployments(auth)
image = artifacts.Artifacts()
inv = inventory.Inventory(auth)
|
3ecc978421e1bcceb30635e875333e52272e07a3
|
tests/providers/test_ovh.py
|
tests/providers/test_ovh.py
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
Select ovh-eu entrypoint for test integration
|
Select ovh-eu entrypoint for test integration
|
Python
|
mit
|
tnwhitwell/lexicon,AnalogJ/lexicon,AnalogJ/lexicon,tnwhitwell/lexicon
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
Select ovh-eu entrypoint for test integration
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
<commit_before># Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
<commit_msg>Select ovh-eu entrypoint for test integration<commit_after>
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
Select ovh-eu entrypoint for test integration# Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
<commit_before># Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['domain'] = self.domain
return cmd_options
<commit_msg>Select ovh-eu entrypoint for test integration<commit_after># Test for one implementation of the interface
from unittest import TestCase
from lexicon.providers.ovh import Provider
from lexicon.common.options_handler import env_auth_options
from integration_tests import IntegrationTests
# Hook into testing framework by inheriting unittest.TestCase and reuse
# the tests which *each and every* implementation of the interface must
# pass, by inheritance from integration_tests.IntegrationTests
class OvhProviderTests(TestCase, IntegrationTests):
Provider = Provider
provider_name = 'ovh'
domain = 'elogium.net'
def _filter_headers(self):
return ['X-Ovh-Application', 'X-Ovh-Consumer', 'X-Ovh-Signature']
# Override _test_options to call env_auth_options and then import auth config from env variables
def _test_options(self):
cmd_options = env_auth_options(self.provider_name)
cmd_options['auth_entrypoint'] = 'ovh-eu'
cmd_options['domain'] = self.domain
return cmd_options
|
42a6421fedadaea5f583dbccb8908b9b2df97231
|
spacq/devices/lakeshore/mock/mock_tc335.py
|
spacq/devices/lakeshore/mock/mock_tc335.py
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
MockAbstractDevice.__init__(self, *args, **kwargs)
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
MockAbstractDevice.__init__(self, *args, **kwargs)
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
Fix a bug involving how mock_state is set
|
Fix a bug involving how mock_state is set
|
Python
|
bsd-2-clause
|
ghwatson/SpanishAcquisitionIQC,ghwatson/SpanishAcquisitionIQC
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
MockAbstractDevice.__init__(self, *args, **kwargs)
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
Fix a bug involving how mock_state is set
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
MockAbstractDevice.__init__(self, *args, **kwargs)
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
<commit_before>import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
MockAbstractDevice.__init__(self, *args, **kwargs)
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
<commit_msg>Fix a bug involving how mock_state is set<commit_after>
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
MockAbstractDevice.__init__(self, *args, **kwargs)
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
MockAbstractDevice.__init__(self, *args, **kwargs)
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
Fix a bug involving how mock_state is set
import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
MockAbstractDevice.__init__(self, *args, **kwargs)
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
<commit_before>import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
MockAbstractDevice.__init__(self, *args, **kwargs)
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
<commit_msg>Fix a bug involving how mock_state is set<commit_after>import random
from ...mock.mock_abstract_device import MockAbstractDevice
from ..tc335 import TC335
"""
Mock Lakeshore 335 Temperature Controller
"""
class MockTC335(MockAbstractDevice, TC335):
"""
Mock interface for Lakeshore 335 Temperature Controller.
"""
def __init__(self, *args, **kwargs):
self.mocking = TC335
MockAbstractDevice.__init__(self, *args, **kwargs)
self.mock_state = {}
self.mock_state['readingstatus'] = 0
self.mock_state['read_only'] = ['temperature']
def _reset(self):
pass
def write(self, message, result=None, done=False):
if not done:
cmd, args, query = self._split_message(message)
if cmd[0] == 'rdgst' and query:
result = self.mock_state['readingstatus']
done = True
elif cmd[0] == 'krdg' and query:
result = random.randint(5,100)
done = True
MockAbstractDevice.write(self, message, result, done)
name = '335 Temperature Controller'
implementation = MockTC335
|
2c54a9eb78a1cb88ef03db97e21e376ae764a33e
|
errata/admin_actions.py
|
errata/admin_actions.py
|
import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
                         fields=None, exclude=None, header=True):
    """
    Return a Django admin action that exports the selected objects as CSV.

    'fields' and 'exclude' work like in django ModelForm ('exclude' is
    accepted but currently not applied).
    'header' is whether or not to output the column names as the first row.
    """
    def export_as_csv(modeladmin, request, queryset):
        opts = modeladmin.model._meta
        # Default to every model field unless an explicit list was given.
        if not fields:
            field_names = [field.name for field in opts.fields]
        else:
            field_names = fields
        response = HttpResponse(content_type='text/csv')
        # e.g. "app.model" -> "app_model.csv"
        response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
        writer = unicodecsv.writer(response, encoding='utf-8')
        if header:
            writer.writerow(field_names)
        for obj in queryset:
            # Call callables (methods) to get their value; use plain
            # attributes as-is.
            row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
            writer.writerow(row)
        return response
    export_as_csv.short_description = description
    return export_as_csv
|
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
    """An object that implements just the write method of the file-like
    interface.

    A csv writer calls write() on it; the value is returned instead of
    buffered, so the caller can forward each encoded line as it is produced.
    """
    def write(self, value):
        """Write the value by returning it, instead of storing in a buffer."""
        return value


def export_as_csv_action(description="Export selected objects as CSV file",
                         fields=None, exclude=None, header=True):
    """
    Return a Django admin action that streams the selected objects as CSV.

    'fields' and 'exclude' work like in django ModelForm ('exclude' is
    accepted but currently not applied).
    'header' is whether or not to output the column names as the first row.
    """
    def export_as_csv(modeladmin, request, queryset):
        opts = modeladmin.model._meta
        if not fields:
            field_names = [field.name for field in opts.fields]
        else:
            field_names = fields

        pseudo_buffer = Echo()
        writer = unicodecsv.writer(pseudo_buffer)

        def generate_rows():
            # Yield one encoded CSV line at a time: writerow() returns the
            # underlying write() result, which Echo passes straight through.
            if header:
                yield writer.writerow(field_names)
            for obj in queryset:
                row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
                yield writer.writerow(row)

        # Bug fix: the generator must be handed to StreamingHttpResponse as
        # its streaming content.  Previously the rows were written eagerly
        # and their return values discarded, so the response body was empty
        # and nothing was actually streamed.
        response = StreamingHttpResponse(generate_rows(), content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
        return response

    export_as_csv.short_description = description
    return export_as_csv
|
Make use of Django's StreamingHttpResponse for large CSV exports
|
Make use of Django's StreamingHttpResponse for large CSV exports
|
Python
|
agpl-3.0
|
Connexions/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms
|
import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
    return export_as_csv
Make use of Django's StreamingHttpResponse for large CSV exports
|
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
<commit_before>import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv<commit_msg>Make use of Django's StreamingHttpResponse for large CSV exports<commit_after>
|
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
    return export_as_csv
Make use of Django's StreamingHttpResponse for large CSV exports
import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
<commit_before>import unicodecsv
from django.http import HttpResponse
from django.utils.encoding import smart_str
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = HttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
writer = unicodecsv.writer(response, encoding='utf-8')
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv<commit_msg>Make use of Django's StreamingHttpResponse for large CSV exports<commit_after>import unicodecsv
from django.http import StreamingHttpResponse
class Echo:
"""An object that implements just the write method of the file-like
interface.
"""
def write(self, value):
"""Write the value by returning it, instead of storing in a buffer."""
return value
def export_as_csv_action(description="Export selected objects as CSV file",
fields=None, exclude=None, header=True):
"""
This function returns an export csv action
'fields' and 'exclude' work like in django ModelForm
'header' is whether or not to output the column names as the first row
"""
def export_as_csv(modeladmin, request, queryset):
opts = modeladmin.model._meta
if not fields:
field_names = [field.name for field in opts.fields]
else:
field_names = fields
response = StreamingHttpResponse(content_type='text/csv')
response['Content-Disposition'] = 'attachment; filename=%s.csv' % str(opts).replace('.', '_')
pseudo_buffer = Echo()
writer = unicodecsv.writer(pseudo_buffer)
if header:
writer.writerow(field_names)
for obj in queryset:
row = [getattr(obj, field)() if callable(getattr(obj, field)) else getattr(obj, field) for field in field_names]
writer.writerow(row)
return response
export_as_csv.short_description = description
return export_as_csv
|
d3c7ae5389f2fd90ae35d87f87e4f7dd01572f4a
|
numpy/f2py/__init__.py
|
numpy/f2py/__init__.py
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
from info import __doc__
import f2py2e
run_main = f2py2e.run_main
main = f2py2e.main
import f2py_testing
def compile(source,
            modulename = 'untitled',
            extra_args = '',
            verbose = 1,
            source_fn = None
            ):
    """Build an extension module by running f2py on Fortran *source*.

    source     : str, the Fortran source code.
    modulename : name of the generated extension module.
    extra_args : extra command-line arguments passed through to f2py.
    verbose    : accepted for backward compatibility (currently unused).
    source_fn  : write the source to this file instead of a temporary one;
                 a caller-supplied file is kept after compilation.

    Returns the exit status of the f2py subprocess (0 on success).
    """
    from numpy.distutils.exec_command import exec_command
    import tempfile
    if source_fn is None:
        # mkstemp, unlike the race-prone mktemp, creates the file atomically.
        # (The original also wrapped the name in a pointless single-argument
        # os.path.join.)
        fd, fname = tempfile.mkstemp(suffix='.f')
        os.close(fd)
    else:
        fname = source_fn
    try:
        # Close the handle even if the write fails.
        with open(fname, 'w') as f:
            f.write(source)
        args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
        c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
        s,o = exec_command(c)
    finally:
        # Always clean up a temporary file, even if compilation raised.
        if source_fn is None:
            try:
                os.remove(fname)
            except OSError:
                pass
    return s
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
import f2py2e
import f2py_testing
import diagnose
from info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
            modulename = 'untitled',
            extra_args = '',
            verbose = 1,
            source_fn = None
            ):
    ''' Build extension module from processing source with f2py.
    Read the source of this function for more information.
    '''
    import tempfile
    from numpy.distutils.exec_command import exec_command

    # Decide where the Fortran source goes: a caller-supplied path, or a
    # fresh temporary file that is removed again afterwards.
    use_tempfile = source_fn is None
    if use_tempfile:
        fname = os.path.join(tempfile.mktemp()+'.f')
    else:
        fname = source_fn

    fortran_file = open(fname,'w')
    fortran_file.write(source)
    fortran_file.close()

    # Invoke f2py in a child interpreter and collect its exit status.
    f2py_args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
    command = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,f2py_args)
    status, output = exec_command(command)

    if use_tempfile:
        try:
            os.remove(fname)
        except OSError:
            pass
    return status
|
Add diagnose to f2py package. This makes the tests a bit easier to fix.
|
ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.
|
Python
|
bsd-3-clause
|
ChristopherHogan/numpy,ChristopherHogan/numpy,seberg/numpy,bmorris3/numpy,njase/numpy,BabeNovelty/numpy,mhvk/numpy,tdsmith/numpy,cowlicks/numpy,MaPePeR/numpy,rmcgibbo/numpy,utke1/numpy,simongibbons/numpy,GrimDerp/numpy,shoyer/numpy,numpy/numpy-refactor,has2k1/numpy,ESSS/numpy,githubmlai/numpy,rgommers/numpy,Srisai85/numpy,pizzathief/numpy,joferkington/numpy,rhythmsosad/numpy,embray/numpy,dch312/numpy,embray/numpy,jonathanunderwood/numpy,trankmichael/numpy,has2k1/numpy,SunghanKim/numpy,bmorris3/numpy,madphysicist/numpy,jschueller/numpy,musically-ut/numpy,felipebetancur/numpy,WillieMaddox/numpy,numpy/numpy,SunghanKim/numpy,sigma-random/numpy,astrofrog/numpy,WarrenWeckesser/numpy,NextThought/pypy-numpy,ajdawson/numpy,MaPePeR/numpy,rudimeier/numpy,pizzathief/numpy,grlee77/numpy,Dapid/numpy,stefanv/numpy,ajdawson/numpy,CMartelLML/numpy,GrimDerp/numpy,jonathanunderwood/numpy,ddasilva/numpy,shoyer/numpy,jorisvandenbossche/numpy,rherault-insa/numpy,MichaelAquilina/numpy,Eric89GXL/numpy,SunghanKim/numpy,mwiebe/numpy,rajathkumarmp/numpy,jankoslavic/numpy,larsmans/numpy,tdsmith/numpy,KaelChen/numpy,brandon-rhodes/numpy,SiccarPoint/numpy,rajathkumarmp/numpy,numpy/numpy,dato-code/numpy,seberg/numpy,pdebuyl/numpy,KaelChen/numpy,Anwesh43/numpy,leifdenby/numpy,joferkington/numpy,matthew-brett/numpy,BabeNovelty/numpy,gmcastil/numpy,brandon-rhodes/numpy,MichaelAquilina/numpy,anntzer/numpy,Eric89GXL/numpy,mortada/numpy,groutr/numpy,ahaldane/numpy,simongibbons/numpy,anntzer/numpy,astrofrog/numpy,stuarteberg/numpy,ChanderG/numpy,ContinuumIO/numpy,jakirkham/numpy,grlee77/numpy,pizzathief/numpy,kirillzhuravlev/numpy,NextThought/pypy-numpy,dimasad/numpy,astrofrog/numpy,GaZ3ll3/numpy,andsor/numpy,ViralLeadership/numpy,MichaelAquilina/numpy,WillieMaddox/numpy,mingwpy/numpy,dimasad/numpy,anntzer/numpy,rhythmsosad/numpy,simongibbons/numpy,numpy/numpy-refactor,sigma-random/numpy,tacaswell/numpy,bringingheavendown/numpy,ahaldane/numpy,b-carter/numpy,jakirkham/numpy,BabeNovelty/numpy,mathdd/numpy
,mwiebe/numpy,drasmuss/numpy,WarrenWeckesser/numpy,ChristopherHogan/numpy,b-carter/numpy,andsor/numpy,endolith/numpy,bertrand-l/numpy,mwiebe/numpy,endolith/numpy,ogrisel/numpy,abalkin/numpy,rmcgibbo/numpy,mhvk/numpy,ChanderG/numpy,GrimDerp/numpy,seberg/numpy,jorisvandenbossche/numpy,mingwpy/numpy,ogrisel/numpy,ogrisel/numpy,mortada/numpy,ewmoore/numpy,mindw/numpy,madphysicist/numpy,pelson/numpy,mindw/numpy,dato-code/numpy,matthew-brett/numpy,mhvk/numpy,sinhrks/numpy,stuarteberg/numpy,pelson/numpy,njase/numpy,SiccarPoint/numpy,skymanaditya1/numpy,ViralLeadership/numpy,ssanderson/numpy,Yusa95/numpy,trankmichael/numpy,Anwesh43/numpy,WillieMaddox/numpy,jonathanunderwood/numpy,has2k1/numpy,GaZ3ll3/numpy,ajdawson/numpy,Yusa95/numpy,ChristopherHogan/numpy,dwillmer/numpy,Anwesh43/numpy,rudimeier/numpy,anntzer/numpy,ahaldane/numpy,shoyer/numpy,MSeifert04/numpy,charris/numpy,ChanderG/numpy,Dapid/numpy,grlee77/numpy,pbrod/numpy,pbrod/numpy,nguyentu1602/numpy,endolith/numpy,ekalosak/numpy,SunghanKim/numpy,numpy/numpy-refactor,MSeifert04/numpy,ewmoore/numpy,behzadnouri/numpy,gfyoung/numpy,ContinuumIO/numpy,mattip/numpy,NextThought/pypy-numpy,rgommers/numpy,solarjoe/numpy,astrofrog/numpy,gmcastil/numpy,naritta/numpy,chiffa/numpy,sigma-random/numpy,tdsmith/numpy,ewmoore/numpy,behzadnouri/numpy,ssanderson/numpy,nguyentu1602/numpy,felipebetancur/numpy,grlee77/numpy,rgommers/numpy,yiakwy/numpy,ekalosak/numpy,MaPePeR/numpy,b-carter/numpy,pizzathief/numpy,jankoslavic/numpy,dwf/numpy,nguyentu1602/numpy,SiccarPoint/numpy,dch312/numpy,trankmichael/numpy,BabeNovelty/numpy,tacaswell/numpy,skwbc/numpy,kirillzhuravlev/numpy,behzadnouri/numpy,andsor/numpy,dwillmer/numpy,grlee77/numpy,moreati/numpy,dwf/numpy,cowlicks/numpy,chiffa/numpy,utke1/numpy,pyparallel/numpy,sonnyhu/numpy,chiffa/numpy,mhvk/numpy,andsor/numpy,AustereCuriosity/numpy,drasmuss/numpy,rudimeier/numpy,ESSS/numpy,cjermain/numpy,Linkid/numpy,tacaswell/numpy,mathdd/numpy,argriffing/numpy,ddasilva/numpy,AustereCuriosity/numpy,sigma-
random/numpy,pyparallel/numpy,ogrisel/numpy,dwillmer/numpy,rhythmsosad/numpy,abalkin/numpy,Srisai85/numpy,pbrod/numpy,ddasilva/numpy,bringingheavendown/numpy,simongibbons/numpy,pelson/numpy,embray/numpy,stuarteberg/numpy,mindw/numpy,bertrand-l/numpy,GrimDerp/numpy,tynn/numpy,argriffing/numpy,embray/numpy,nbeaver/numpy,musically-ut/numpy,MSeifert04/numpy,shoyer/numpy,ESSS/numpy,kiwifb/numpy,skymanaditya1/numpy,tynn/numpy,rherault-insa/numpy,sonnyhu/numpy,cjermain/numpy,yiakwy/numpy,naritta/numpy,maniteja123/numpy,dato-code/numpy,pdebuyl/numpy,WarrenWeckesser/numpy,KaelChen/numpy,WarrenWeckesser/numpy,gmcastil/numpy,astrofrog/numpy,nbeaver/numpy,KaelChen/numpy,hainm/numpy,larsmans/numpy,numpy/numpy-refactor,SiccarPoint/numpy,bertrand-l/numpy,hainm/numpy,jakirkham/numpy,Yusa95/numpy,jorisvandenbossche/numpy,naritta/numpy,pelson/numpy,numpy/numpy-refactor,CMartelLML/numpy,matthew-brett/numpy,mortada/numpy,cjermain/numpy,seberg/numpy,hainm/numpy,ChanderG/numpy,drasmuss/numpy,jankoslavic/numpy,kiwifb/numpy,dwf/numpy,BMJHayward/numpy,ahaldane/numpy,rmcgibbo/numpy,tynn/numpy,chatcannon/numpy,mortada/numpy,dch312/numpy,joferkington/numpy,groutr/numpy,empeeu/numpy,ekalosak/numpy,groutr/numpy,BMJHayward/numpy,WarrenWeckesser/numpy,matthew-brett/numpy,sonnyhu/numpy,bmorris3/numpy,stefanv/numpy,embray/numpy,abalkin/numpy,brandon-rhodes/numpy,stefanv/numpy,madphysicist/numpy,dimasad/numpy,bmorris3/numpy,mattip/numpy,GaZ3ll3/numpy,charris/numpy,empeeu/numpy,mathdd/numpy,jschueller/numpy,MSeifert04/numpy,pdebuyl/numpy,jakirkham/numpy,mattip/numpy,yiakwy/numpy,felipebetancur/numpy,bringingheavendown/numpy,ContinuumIO/numpy,BMJHayward/numpy,skwbc/numpy,numpy/numpy,Linkid/numpy,maniteja123/numpy,empeeu/numpy,githubmlai/numpy,madphysicist/numpy,empeeu/numpy,MaPePeR/numpy,utke1/numpy,jorisvandenbossche/numpy,ekalosak/numpy,naritta/numpy,leifdenby/numpy,pbrod/numpy,Eric89GXL/numpy,kirillzhuravlev/numpy,jorisvandenbossche/numpy,Yusa95/numpy,musically-ut/numpy,kirillzhuravlev/numpy,rhythms
osad/numpy,matthew-brett/numpy,mingwpy/numpy,immerrr/numpy,mhvk/numpy,pyparallel/numpy,cjermain/numpy,solarjoe/numpy,ssanderson/numpy,githubmlai/numpy,mingwpy/numpy,skymanaditya1/numpy,stuarteberg/numpy,skymanaditya1/numpy,has2k1/numpy,BMJHayward/numpy,MichaelAquilina/numpy,AustereCuriosity/numpy,gfyoung/numpy,sinhrks/numpy,rgommers/numpy,stefanv/numpy,charris/numpy,immerrr/numpy,sonnyhu/numpy,moreati/numpy,ewmoore/numpy,dwillmer/numpy,dch312/numpy,joferkington/numpy,sinhrks/numpy,dimasad/numpy,jschueller/numpy,pizzathief/numpy,nbeaver/numpy,leifdenby/numpy,endolith/numpy,CMartelLML/numpy,simongibbons/numpy,Dapid/numpy,dwf/numpy,felipebetancur/numpy,githubmlai/numpy,chatcannon/numpy,rajathkumarmp/numpy,mathdd/numpy,Linkid/numpy,GaZ3ll3/numpy,njase/numpy,solarjoe/numpy,skwbc/numpy,larsmans/numpy,pdebuyl/numpy,jakirkham/numpy,NextThought/pypy-numpy,cowlicks/numpy,rajathkumarmp/numpy,hainm/numpy,chatcannon/numpy,trankmichael/numpy,pbrod/numpy,ewmoore/numpy,numpy/numpy,jschueller/numpy,Srisai85/numpy,cowlicks/numpy,rmcgibbo/numpy,MSeifert04/numpy,dato-code/numpy,madphysicist/numpy,musically-ut/numpy,maniteja123/numpy,Srisai85/numpy,immerrr/numpy,Anwesh43/numpy,ajdawson/numpy,Eric89GXL/numpy,ahaldane/numpy,charris/numpy,mindw/numpy,nguyentu1602/numpy,kiwifb/numpy,ogrisel/numpy,sinhrks/numpy,CMartelLML/numpy,Linkid/numpy,brandon-rhodes/numpy,rudimeier/numpy,larsmans/numpy,immerrr/numpy,moreati/numpy,yiakwy/numpy,mattip/numpy,pelson/numpy,tdsmith/numpy,argriffing/numpy,shoyer/numpy,ViralLeadership/numpy,rherault-insa/numpy,jankoslavic/numpy,gfyoung/numpy,stefanv/numpy,dwf/numpy
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
from info import __doc__
import f2py2e
run_main = f2py2e.run_main
main = f2py2e.main
import f2py_testing
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
import f2py2e
import f2py_testing
import diagnose
from info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
|
<commit_before>#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
from info import __doc__
import f2py2e
run_main = f2py2e.run_main
main = f2py2e.main
import f2py_testing
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
<commit_msg>ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.<commit_after>
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
import f2py2e
import f2py_testing
import diagnose
from info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
|
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
from info import __doc__
import f2py2e
run_main = f2py2e.run_main
main = f2py2e.main
import f2py_testing
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.
#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
import f2py2e
import f2py_testing
import diagnose
from info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
|
<commit_before>#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
from info import __doc__
import f2py2e
run_main = f2py2e.run_main
main = f2py2e.main
import f2py_testing
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
<commit_msg>ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.<commit_after>#!/usr/bin/env python
__all__ = ['run_main','compile','f2py_testing']
import os
import sys
import commands
import f2py2e
import f2py_testing
import diagnose
from info import __doc__
run_main = f2py2e.run_main
main = f2py2e.main
def compile(source,
modulename = 'untitled',
extra_args = '',
verbose = 1,
source_fn = None
):
''' Build extension module from processing source with f2py.
Read the source of this function for more information.
'''
from numpy.distutils.exec_command import exec_command
import tempfile
if source_fn is None:
fname = os.path.join(tempfile.mktemp()+'.f')
else:
fname = source_fn
f = open(fname,'w')
f.write(source)
f.close()
args = ' -c -m %s %s %s'%(modulename,fname,extra_args)
c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args)
s,o = exec_command(c)
if source_fn is None:
try: os.remove(fname)
except OSError: pass
return s
|
bcc6d199186953b5ae05f7e93bf61c169ac89c77
|
opps/archives/admin.py
|
opps/archives/admin.py
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
# coding: utf-8
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
list_display = ['title', 'slug', 'download_link', 'published']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def download_link(self, obj):
html = '<a href="{}">{}</a>'.format(obj.archive.url,
unicode(_(u'Download')))
return html
download_link.short_description = _(u'download')
download_link.allow_tags = True
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
Add list_display on FileAdmin and download_link def
|
Add list_display on FileAdmin and download_link def
|
Python
|
mit
|
YACOWS/opps,opps/opps,opps/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
Add list_display on FileAdmin and download_link def
|
# coding: utf-8
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
list_display = ['title', 'slug', 'download_link', 'published']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def download_link(self, obj):
html = '<a href="{}">{}</a>'.format(obj.archive.url,
unicode(_(u'Download')))
return html
download_link.short_description = _(u'download')
download_link.allow_tags = True
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
<commit_before>from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
<commit_msg>Add list_display on FileAdmin and download_link def<commit_after>
|
# coding: utf-8
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
list_display = ['title', 'slug', 'download_link', 'published']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def download_link(self, obj):
html = '<a href="{}">{}</a>'.format(obj.archive.url,
unicode(_(u'Download')))
return html
download_link.short_description = _(u'download')
download_link.allow_tags = True
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
Add list_display on FileAdmin and download_link def# coding: utf-8
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
list_display = ['title', 'slug', 'download_link', 'published']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def download_link(self, obj):
html = '<a href="{}">{}</a>'.format(obj.archive.url,
unicode(_(u'Download')))
return html
download_link.short_description = _(u'download')
download_link.allow_tags = True
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
<commit_before>from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
<commit_msg>Add list_display on FileAdmin and download_link def<commit_after># coding: utf-8
from django.contrib import admin
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from opps.core.admin import apply_opps_rules
from opps.contrib.multisite.admin import AdminViewPermission
from .models import File
@apply_opps_rules('archives')
class FileAdmin(AdminViewPermission):
search_fields = ['title', 'slug']
raw_id_fields = ['user']
list_display = ['title', 'slug', 'download_link', 'published']
ordering = ('-date_available',)
list_filter = ['date_available', 'published']
prepopulated_fields = {"slug": ["title"]}
fieldsets = (
(_(u'Identification'), {
'fields': ('site', 'title', 'slug',)}),
(_(u'Content'), {
'fields': ('description', 'archive', 'archive_link', 'tags')}),
(_(u'Publication'), {
'classes': ('extrapretty'),
'fields': ('published', 'date_available',)}),
)
def download_link(self, obj):
html = '<a href="{}">{}</a>'.format(obj.archive.url,
unicode(_(u'Download')))
return html
download_link.short_description = _(u'download')
download_link.allow_tags = True
def save_model(self, request, obj, form, change):
if not change:
obj.user = get_user_model().objects.get(pk=request.user.pk)
obj.date_insert = timezone.now()
obj.date_update = timezone.now()
obj.save()
admin.site.register(File, FileAdmin)
|
71241579d678185eb315ba2658f1c7eb9ec75603
|
example/django/tests.py
|
example/django/tests.py
|
from __future__ import absolute_import
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
|
from __future__ import absolute_import
from django.core.cache import cache
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
def test_use_the_cache(self):
for n in xrange(2 ** 6):
cache.set('test-%s' % (n - 1), n)
cache.set('test-%s-%s' % (n, n - 1), cache.get('test-%s' % (n - 1)))
|
Add basic cache test example
|
Add basic cache test example
|
Python
|
apache-2.0
|
disqus/nose-performance,disqus/nose-performance
|
from __future__ import absolute_import
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
Add basic cache test example
|
from __future__ import absolute_import
from django.core.cache import cache
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
def test_use_the_cache(self):
for n in xrange(2 ** 6):
cache.set('test-%s' % (n - 1), n)
cache.set('test-%s-%s' % (n, n - 1), cache.get('test-%s' % (n - 1)))
|
<commit_before>from __future__ import absolute_import
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
<commit_msg>Add basic cache test example<commit_after>
|
from __future__ import absolute_import
from django.core.cache import cache
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
def test_use_the_cache(self):
for n in xrange(2 ** 6):
cache.set('test-%s' % (n - 1), n)
cache.set('test-%s-%s' % (n, n - 1), cache.get('test-%s' % (n - 1)))
|
from __future__ import absolute_import
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
Add basic cache test examplefrom __future__ import absolute_import
from django.core.cache import cache
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
def test_use_the_cache(self):
for n in xrange(2 ** 6):
cache.set('test-%s' % (n - 1), n)
cache.set('test-%s-%s' % (n, n - 1), cache.get('test-%s' % (n - 1)))
|
<commit_before>from __future__ import absolute_import
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
<commit_msg>Add basic cache test example<commit_after>from __future__ import absolute_import
from django.core.cache import cache
from django.contrib.auth.models import User
from noseperf.testcases import DjangoPerformanceTest
class DjangoSampleTest(DjangoPerformanceTest):
def test_create_a_bunch_of_users(self):
for n in xrange(2 ** 8):
User.objects.create(username='test-%d' % n, email='test-%d@example.com' % n)
def test_use_the_cache(self):
for n in xrange(2 ** 6):
cache.set('test-%s' % (n - 1), n)
cache.set('test-%s-%s' % (n, n - 1), cache.get('test-%s' % (n - 1)))
|
324ce82f25c78bce7f92af52952f036ba48e72e7
|
astrobin_apps_notifications/utils.py
|
astrobin_apps_notifications/utils.py
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
Revert "Drop extra trailing slash from notices_url"
|
Revert "Drop extra trailing slash from notices_url"
This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.
|
Python
|
agpl-3.0
|
astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
Revert "Drop extra trailing slash from notices_url"
This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
<commit_before># Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
<commit_msg>Revert "Drop extra trailing slash from notices_url"
This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.<commit_after>
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
Revert "Drop extra trailing slash from notices_url"
This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.# Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
<commit_before># Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
<commit_msg>Revert "Drop extra trailing slash from notices_url"
This reverts commit 1eb4d00e005f22ae452ce9d36b9fce69fa9b96f7.<commit_after># Python
import simplejson
import urllib2
# Django
from django.apps import apps
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Third party
from notification import models as notification
from persistent_messages.models import Message
def push_notification(recipients, notice_type, data):
data.update({'notices_url': settings.ASTROBIN_BASE_URL + '/'})
notification.send(recipients, notice_type, data)
def get_recent_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications = Message.objects.filter(user = user).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_unseen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = False).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
def get_seen_notifications(user, n = 10):
if not user.is_authenticated():
return None
notifications =\
Message.objects.filter(user = user, read = True).order_by('-created')
if n >= 0:
notifications = notifications[:n]
return notifications
|
82bb5e5a6c81b2d473ec815c3b9a8e5aee154ff5
|
meinberlin/apps/plans/urls.py
|
meinberlin/apps/plans/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/all/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/format/xslx/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
Use format in export url
|
Use format in export url
|
Python
|
agpl-3.0
|
liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/all/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
Use format in export url
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/format/xslx/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/all/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
<commit_msg>Use format in export url<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/format/xslx/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/all/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
Use format in export urlfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/format/xslx/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/all/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
<commit_msg>Use format in export url<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^(?P<slug>[-\w_]+)/$',
views.PlanDetailView.as_view(), name='plan-detail'),
url('^export/format/xslx/$',
views.PlanExportView.as_view(), name='plan-export'),
url('^$',
views.PlanListView.as_view(), name='plan-list'),
]
|
2cf5041ff923fbecdcd31595d8340d12bb4d6283
|
build/copy_sources.py
|
build/copy_sources.py
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
print 'From %s to %s\n' % (argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
Add information on Copy command.
|
Add information on Copy command.
Adding extra information to track down mysterious mac build failures.
tbr=bradnelson@google.com
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@9679 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2
|
Python
|
bsd-3-clause
|
sbc100/native_client,nacl-webkit/native_client,sbc100/native_client,nacl-webkit/native_client,nacl-webkit/native_client,sbc100/native_client,nacl-webkit/native_client,sbc100/native_client,sbc100/native_client,sbc100/native_client,nacl-webkit/native_client
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
Add information on Copy command.
Adding extra information to track down mysterious mac build failures.
tbr=bradnelson@google.com
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@9679 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
print 'From %s to %s\n' % (argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
<commit_msg>Add information on Copy command.
Adding extra information to track down mysterious mac build failures.
tbr=bradnelson@google.com
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@9679 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
print 'From %s to %s\n' % (argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
Add information on Copy command.
Adding extra information to track down mysterious mac build failures.
tbr=bradnelson@google.com
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@9679 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
print 'From %s to %s\n' % (argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
<commit_msg>Add information on Copy command.
Adding extra information to track down mysterious mac build failures.
tbr=bradnelson@google.com
git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@9679 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2<commit_after>#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import sys
"""Copy Sources
Copy from a source file or directory to a new file or directory. This
supports renaming of the file.
"""
def ErrOut(text):
print '\n\n'
print ' '.join(sys.argv)
print text
sys.exit(1)
def MakeDir(outdir):
if outdir and not os.path.exists(outdir):
os.makedirs(outdir)
def Main(argv):
if len(argv) != 3:
print 'Expecting: copy_sources.py <source file> <dest file/dir>'
return 1
if not os.path.exists(argv[1]):
print 'File not found: %s' % argv[1]
return 1
shutil.copy(argv[1], argv[2])
print 'From %s to %s\n' % (argv[1], argv[2])
return 0
if __name__ == '__main__':
sys.exit(Main(sys.argv))
|
efabe61cec636d5104a639b8d5cfef23eb840dd7
|
apps/live/urls.py
|
apps/live/urls.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
Remove the missing view references.
|
Remove the missing view references.
|
Python
|
apache-2.0
|
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
Remove the missing view references.
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
<commit_msg>Remove the missing view references.<commit_after>
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
Remove the missing view references.# -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
<commit_before># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import (AwayView, DiscussionView, EpilogueView, GameView,
NotifierView, PrologueView, StatusView)
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
<commit_msg>Remove the missing view references.<commit_after># -*- coding: utf-8 -*-
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.views.generic import RedirectView
from .views import StatusView
urlpatterns = patterns('',
# Because sooner or later, avalonstar.tv/ will be a welcome page.
url(r'^$', name='site-home', view=RedirectView.as_view(url='http://twitch.tv/avalonstar')),
# Status (for bots).
url(r'^status/$', name='live-status', view=StatusView.as_view()),
)
|
f64cb48d51e1bcc3879a40d308452c4e65d13439
|
src/pymfony/component/system/serializer.py
|
src/pymfony/component/system/serializer.py
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
Use pickle protocole 2 to BC for Python2*
|
[System][Serializer] Use pickle protocole 2 to BC for Python2*
|
Python
|
mit
|
pymfony/pymfony
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
[System][Serializer] Use pickle protocole 2 to BC for Python2*
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
<commit_msg>[System][Serializer] Use pickle protocole 2 to BC for Python2*<commit_after>
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
[System][Serializer] Use pickle protocole 2 to BC for Python2*# -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
def serialize(obj):
return encodebytes(dumps(obj)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
<commit_msg>[System][Serializer] Use pickle protocole 2 to BC for Python2*<commit_after># -*- coding: utf-8 -*-
# This file is part of the pymfony package.
#
# (c) Alexandre Quercia <alquerci@email.com>
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
from __future__ import absolute_import;
from pickle import dumps;
from pickle import loads;
try:
from base64 import encodebytes;
from base64 import decodebytes;
except Exception:
from base64 import encodestring as encodebytes;
from base64 import decodestring as decodebytes;
"""
"""
CHARSET = 'UTF-8';
PICKLE_PROTOCOL = 2;
def serialize(obj):
return encodebytes(dumps(obj, PICKLE_PROTOCOL)).decode(CHARSET).replace('\n', '');
def unserialize(s):
return loads(decodebytes(s.encode(CHARSET)));
|
be89b2d9617fd5b837695e4322a2c98e4d4346cc
|
semillas_backend/users/serializers.py
|
semillas_backend/users/serializers.py
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
Add phone and email to user serializer
|
Add phone and email to user serializer
|
Python
|
mit
|
Semillas/semillas_backend,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform,Semillas/semillas_backend,Semillas/semillas_platform
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
Add phone and email to user serializer
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_msg>Add phone and email to user serializer<commit_after>
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
Add phone and email to user serializer#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
<commit_before>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
<commit_msg>Add phone and email to user serializer<commit_after>#from phonenumber_field.serializerfields import PhoneNumberField
from rest_framework import serializers
from drf_extra_fields.geo_fields import PointField
from .models import User
class UserSerializer(serializers.ModelSerializer):
""" Usage:
from rest_framework.renderers import JSONRenderer
from semillas_backend.users.serializers import UserSerializer
JSONRenderer().render(UserSerializer(user_instance).data)
"""
location = PointField()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'email', 'phone')
class UpdateUserSerializer(serializers.ModelSerializer):
name = serializers.CharField(required=False)
#phone = PhoneNumberField(required=False)
email = serializers.CharField(required=False)
picture = serializers.ImageField(required=False)
uuid = serializers.CharField(read_only=True)
class Meta:
model = User
fields = ('name', 'picture', 'phone', 'email', 'uuid')
from wallet.serializers import WalletSerializer
class FullUserSerializer(UserSerializer):
wallet = WalletSerializer()
class Meta:
model = User
fields = ('uuid', 'name', 'picture', 'location', 'username', 'last_login', 'wallet', 'email', 'phone')
|
4cdf5be2a3c01e1b16a5e49bdf770f9d8573e16e
|
icekit/utils/testing.py
|
icekit/utils/testing.py
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from django.core.files.base import ContentFile
from PIL import Image
from StringIO import StringIO
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from PIL import Image
from django.core.files.base import ContentFile
from django.utils import six
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = six.StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
Update StringIO import for Python3 compat
|
Update StringIO import for Python3 compat
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from django.core.files.base import ContentFile
from PIL import Image
from StringIO import StringIO
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
Update StringIO import for Python3 compat
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from PIL import Image
from django.core.files.base import ContentFile
from django.utils import six
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = six.StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
<commit_before># USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from django.core.files.base import ContentFile
from PIL import Image
from StringIO import StringIO
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
<commit_msg>Update StringIO import for Python3 compat<commit_after>
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from PIL import Image
from django.core.files.base import ContentFile
from django.utils import six
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = six.StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from django.core.files.base import ContentFile
from PIL import Image
from StringIO import StringIO
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
Update StringIO import for Python3 compat# USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from PIL import Image
from django.core.files.base import ContentFile
from django.utils import six
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = six.StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
<commit_before># USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from django.core.files.base import ContentFile
from PIL import Image
from StringIO import StringIO
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
<commit_msg>Update StringIO import for Python3 compat<commit_after># USEFUL FUNCTIONS DESIGNED FOR TESTS ##############################################################
import glob
import os
import uuid
from PIL import Image
from django.core.files.base import ContentFile
from django.utils import six
def new_test_image():
"""
Creates an automatically generated test image.
In your testing `tearDown` method make sure to delete the test
image with the helper function `delete_test_image`.
The recommended way of using this helper function is as follows:
object_1.image_property.save(*new_test_image())
:return: Image name and image content file.
"""
image_name = 'test-{}.png'.format(uuid.uuid4())
image_buf = six.StringIO()
image = Image.new('RGBA', size=(50, 50), color=(256, 0, 0))
image.save(image_buf, 'png')
image_buf.seek(0)
return image_name, ContentFile(image_buf.read(), image_name)
def delete_test_image(image_field):
"""
Deletes test image generated as well as thumbnails if created.
The recommended way of using this helper function is as follows:
delete_test_image(object_1.image_property)
:param image_field: The image field on an object.
:return: None.
"""
# ensure all thumbs are deleted
for filename in glob.glob(
os.path.join('public', 'media', 'thumbs', image_field.name) + '*'):
os.unlink(filename)
# delete the saved file
image_field.delete()
|
b6e393271971426506557a208be93d8b79d55cc3
|
examples/image_captioning/download.py
|
examples/image_captioning/download.py
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
Fix error type for Python 2
|
Fix error type for Python 2
|
Python
|
mit
|
chainer/chainer,ktnyt/chainer,hvy/chainer,aonotas/chainer,wkentaro/chainer,tkerola/chainer,chainer/chainer,keisuke-umezawa/chainer,niboshi/chainer,okuta/chainer,niboshi/chainer,ktnyt/chainer,rezoo/chainer,jnishi/chainer,okuta/chainer,hvy/chainer,jnishi/chainer,wkentaro/chainer,hvy/chainer,wkentaro/chainer,jnishi/chainer,niboshi/chainer,chainer/chainer,anaruse/chainer,ktnyt/chainer,wkentaro/chainer,niboshi/chainer,pfnet/chainer,jnishi/chainer,hvy/chainer,keisuke-umezawa/chainer,ktnyt/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,okuta/chainer,chainer/chainer,ronekko/chainer,okuta/chainer
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
Fix error type for Python 2
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
<commit_before>import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
<commit_msg>Fix error type for Python 2<commit_after>
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
Fix error type for Python 2import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
<commit_before>import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except FileExistsError:
raise FileExistsError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
<commit_msg>Fix error type for Python 2<commit_after>import argparse
import os
from six.moves.urllib import request
import zipfile
"""Download the MSCOCO dataset (images and captions)."""
urls = [
'http://images.cocodataset.org/zips/train2014.zip',
'http://images.cocodataset.org/zips/val2014.zip',
'http://images.cocodataset.org/annotations/annotations_trainval2014.zip'
]
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--out', type=str, default='data')
args = parser.parse_args()
try:
os.makedirs(args.out)
except OSError:
raise OSError(
"'{}' already exists, delete it and try again".format(args.out))
for url in urls:
print('Downloading {}...'.format(url))
# Download the zip file
file_name = os.path.basename(url)
dst_file_path = os.path.join(args.out, file_name)
request.urlretrieve(url, dst_file_path)
# Unzip the file
zf = zipfile.ZipFile(dst_file_path)
for name in zf.namelist():
dirname, filename = os.path.split(name)
if not filename == '':
zf.extract(name, args.out)
# Remove the zip file since it has been extracted
os.remove(dst_file_path)
|
9eec48753b2643d25d3ce1e143125b29351e0804
|
features/environment.py
|
features/environment.py
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
Add support for arguments in request() in tests
|
Add support for arguments in request() in tests
|
Python
|
mit
|
m4tx/techswarm-server
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
Add support for arguments in request() in tests
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
<commit_before>import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
<commit_msg>Add support for arguments in request() in tests<commit_after>
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
Add support for arguments in request() in testsimport os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
<commit_before>import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET'):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
<commit_msg>Add support for arguments in request() in tests<commit_after>import os
import tempfile
from flask import json
import tsserver
# If set to True, each time the test is run, new database is created as a
# temporary file. If the value is equal to False, tests will be using SQLite
# in-memory database.
USE_DB_TEMP_FILE = False
def before_scenario(context, scenario):
if USE_DB_TEMP_FILE:
context.db_fd, context.db_url = tempfile.mkstemp()
db_url = 'sqlite:///' + context.db_url
else:
db_url = 'sqlite://'
tsserver.app.config['SQLALCHEMY_DATABASE_URI'] = db_url
# Ensure the tests are actually run in temporary database
assert str(tsserver.db.engine.url) == db_url
tsserver.app.config['TESTING'] = True
tsserver.db.create_all()
context.app = tsserver.app.test_client()
def request(url, method='GET', *args, **kwargs):
"""
Wrapper over Flask.open function that parses returned data as JSON
:param method: HTTP method to be used. GET is used by default
:param url: URL to retrieve
:return: Response object
"""
rv = context.app.open(url, method=method, *args, **kwargs)
rv.json_data = json.loads(rv.data)
return rv
context.request = request
def after_scenario(context, scenario):
if USE_DB_TEMP_FILE:
os.close(context.db_fd)
os.unlink(context.db_url)
|
2d15ff38abb68335daa8bb2b94aaeff91ed829a2
|
photoshell/__main__.py
|
photoshell/__main__.py
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
config = dict(
{
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell')
}
)
if os.path.isfile(config_path):
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
else:
with open(config_path, 'w+') as config_file:
yaml.dump(config, config_file, default_flow_style=False)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
Create default config if one doesn't exist
|
Create default config if one doesn't exist
Fixes #20
|
Python
|
mit
|
photoshell/photoshell,SamWhited/photoshell,campaul/photoshell
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
Create default config if one doesn't exist
Fixes #20
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
config = dict(
{
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell')
}
)
if os.path.isfile(config_path):
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
else:
with open(config_path, 'w+') as config_file:
yaml.dump(config, config_file, default_flow_style=False)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
<commit_before>import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
<commit_msg>Create default config if one doesn't exist
Fixes #20<commit_after>
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
config = dict(
{
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell')
}
)
if os.path.isfile(config_path):
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
else:
with open(config_path, 'w+') as config_file:
yaml.dump(config, config_file, default_flow_style=False)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
Create default config if one doesn't exist
Fixes #20import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
config = dict(
{
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell')
}
)
if os.path.isfile(config_path):
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
else:
with open(config_path, 'w+') as config_file:
yaml.dump(config, config_file, default_flow_style=False)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
<commit_before>import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
<commit_msg>Create default config if one doesn't exist
Fixes #20<commit_after>import os
import sys
import yaml
from photoshell import ui
config_path = os.path.join(os.environ['HOME'], '.photoshell.yaml')
config = dict(
{
'library': os.path.join(os.environ['HOME'], 'Pictures/Photoshell')
}
)
if os.path.isfile(config_path):
with open(config_path, 'r') as config_file:
config = yaml.load(config_file)
else:
with open(config_path, 'w+') as config_file:
yaml.dump(config, config_file, default_flow_style=False)
print('Libray path is {0}'.format(config['library']))
# Open photo viewer
ui.render(config['library'])
|
20a801255ab505641e1ec0d449a4b36411c673bc
|
indra/tests/test_tas.py
|
indra/tests/test_tas.py
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
Update test for current evidence aggregation
|
Update test for current evidence aggregation
|
Python
|
bsd-2-clause
|
sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,bgyori/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/indra,bgyori/indra
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
Update test for current evidence aggregation
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
<commit_before>from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
<commit_msg>Update test for current evidence aggregation<commit_after>
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
Update test for current evidence aggregationfrom nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
<commit_before>from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1601159, num_stmts
assert all(len(s.evidence) == 1 for s in tp.statements), \
"Some statements lack evidence, or have extra evidence."
<commit_msg>Update test for current evidence aggregation<commit_after>from nose.plugins.attrib import attr
from indra.sources.tas import process_from_web
@attr('slow')
def test_processor():
tp = process_from_web(affinity_class_limit=10)
assert tp
assert tp.statements
num_stmts = len(tp.statements)
# This is the total number of statements about human genes
assert num_stmts == 1175682, num_stmts
assert all(len(s.evidence) >= 1 for s in tp.statements), \
'Some statements lack any evidence'
|
5c7c2f87330aae72e4b30be7f4a9867e51793cf6
|
foosball/games/forms.py
|
foosball/games/forms.py
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget,
'score': forms.Select(choices=((i, i) for i in range(11)))
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
Change Game form score input to select
|
Change Game form score input to select
|
Python
|
mit
|
andersinno/foosball,andersinno/foosball,andersinno/foosball,andersinno/foosball
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
Change Game form score input to select
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget,
'score': forms.Select(choices=((i, i) for i in range(11)))
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
<commit_before>from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
<commit_msg>Change Game form score input to select<commit_after>
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget,
'score': forms.Select(choices=((i, i) for i in range(11)))
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
Change Game form score input to selectfrom django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget,
'score': forms.Select(choices=((i, i) for i in range(11)))
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
<commit_before>from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
<commit_msg>Change Game form score input to select<commit_after>from django import forms
from django_select2.forms import ModelSelect2MultipleWidget
from django_superform import ModelFormField, SuperForm
from .models import Team, Game
from .utils import clean_team_forms
from foosball.users.models import User
class MultiPlayerWidget(ModelSelect2MultipleWidget):
model = User
search_fields = [
'username__icontains',
'first_name__icontains',
'last_name__icontains',
'email__icontains',
]
def build_attrs(self, extra_attrs=None, **kwargs):
attrs = super().build_attrs(extra_attrs=extra_attrs, **kwargs)
attrs['data-maximum-selection-length'] = 2
return attrs
def label_from_instance(self, obj):
return " - ".join(filter(None, [obj.username, obj.name]))
class TeamModelForm(forms.ModelForm):
class Meta:
model = Team
fields = ('score', 'players')
widgets = {
'players': MultiPlayerWidget,
'score': forms.Select(choices=((i, i) for i in range(11)))
}
class GameModelForm(forms.ModelForm):
class Meta:
model = Game
fields = ('played_at', 'table')
class GameForm(SuperForm):
game = ModelFormField(GameModelForm)
team1 = ModelFormField(TeamModelForm)
team2 = ModelFormField(TeamModelForm)
def is_valid(self):
return super().is_valid() & clean_team_forms(self.forms['team1'], self.forms['team2'])
|
9c1354c0d14599872e3f87ddc4eaac7dc2d8e760
|
plugins/buddy/buddy.py
|
plugins/buddy/buddy.py
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty.dawson", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
Update slack username for JD
|
Update slack username for JD
|
Python
|
mit
|
eegeo/python-rtmbot
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
Update slack username for JD
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty.dawson", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
<commit_before>import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
<commit_msg>Update slack username for JD<commit_after>
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty.dawson", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
Update slack username for JDimport time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty.dawson", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
<commit_before>import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
<commit_msg>Update slack username for JD<commit_after>import time
import random
crontable = []
outputs = []
buddies = ["tim.jenks", "mark.simpson", "scott", "malcolm.brown", "ian.hutchinson", "jonty.dawson", "oliver.norton", "vimarsh.raina", "paul.harris", "john.bell"]
cursor = -1
def usage():
return "usage: :cow: buddy"
def commandname():
return "buddy"
def process_message(data, plugin):
global cursor
if "text" in data:
splits = data['text'].split(" ")
if splits[0] == ":cow:":
if splits[1] == commandname():
try:
while True:
cursor = cursor + 1
if cursor > len(buddies)-1:
cursor = 0
the_buddy = buddies[cursor]
user_id = data['user'] #requester's UID
slack_client = plugin.getbot().get_slack_client()
user = slack_client.server.users.find(user_id)
if not user == None:
if not the_buddy == user.name:
break
else:
break
outputs.append([data['channel'], "*cow kicks*: @{0}".format(the_buddy)])
except:
raise
outputs.append([data['channel'], usage()])
|
cfc15200ff6c96762379ddeef1aeda9e73a48c12
|
pollers/alarmpoller.py
|
pollers/alarmpoller.py
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
logging.warn("Alarm triggered")
# Notify
self._wasTriggered = isTriggered
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def onStateChange(self, s7conn, isArmed):
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
def onTriggered(self, s7conn):
alarmed = [ ]
for detector in alarm.getDetectors(s7conn):
if detector.isTriggered():
alarmed.append(detector.getName())
if not alarmed:
logging.error("Alarm is triggered, but no detector is active!")
msg = "Alarm! Detectie in %s." % (", ".join(alarmed))
logging.warn(msg)
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
self.onStateChange(s7conn, isArmed)
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
self.onTriggered(s7conn)
self._wasTriggered = isTriggered
|
Move the notification code to separate methods
|
Move the notification code to separate methods
Dealing with state changes is likely to become a significant bit of
code, so move it out into separate methods.
Right now we only log these events, but at least the 'alarm becomes
triggered' one is going to need to do active notification to the user(s).
|
Python
|
bsd-2-clause
|
kprovost/domotica,kprovost/domotica
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
logging.warn("Alarm triggered")
# Notify
self._wasTriggered = isTriggered
Move the notification code to separate methods
Dealing with state changes is likely to become a significant bit of
code, so move it out into separate methods.
Right now we only log these events, but at least the 'alarm becomes
triggered' one is going to need to do active notification to the user(s).
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def onStateChange(self, s7conn, isArmed):
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
def onTriggered(self, s7conn):
alarmed = [ ]
for detector in alarm.getDetectors(s7conn):
if detector.isTriggered():
alarmed.append(detector.getName())
if not alarmed:
logging.error("Alarm is triggered, but no detector is active!")
msg = "Alarm! Detectie in %s." % (", ".join(alarmed))
logging.warn(msg)
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
self.onStateChange(s7conn, isArmed)
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
self.onTriggered(s7conn)
self._wasTriggered = isTriggered
|
<commit_before>import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
logging.warn("Alarm triggered")
# Notify
self._wasTriggered = isTriggered
<commit_msg>Move the notification code to separate methods
Dealing with state changes is likely to become a significant bit of
code, so move it out into separate methods.
Right now we only log these events, but at least the 'alarm becomes
triggered' one is going to need to do active notification to the user(s).<commit_after>
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def onStateChange(self, s7conn, isArmed):
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
def onTriggered(self, s7conn):
alarmed = [ ]
for detector in alarm.getDetectors(s7conn):
if detector.isTriggered():
alarmed.append(detector.getName())
if not alarmed:
logging.error("Alarm is triggered, but no detector is active!")
msg = "Alarm! Detectie in %s." % (", ".join(alarmed))
logging.warn(msg)
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
self.onStateChange(s7conn, isArmed)
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
self.onTriggered(s7conn)
self._wasTriggered = isTriggered
|
import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
logging.warn("Alarm triggered")
# Notify
self._wasTriggered = isTriggered
Move the notification code to separate methods
Dealing with state changes is likely to become a significant bit of
code, so move it out into separate methods.
Right now we only log these events, but at least the 'alarm becomes
triggered' one is going to need to do active notification to the user(s).import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def onStateChange(self, s7conn, isArmed):
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
def onTriggered(self, s7conn):
alarmed = [ ]
for detector in alarm.getDetectors(s7conn):
if detector.isTriggered():
alarmed.append(detector.getName())
if not alarmed:
logging.error("Alarm is triggered, but no detector is active!")
msg = "Alarm! Detectie in %s." % (", ".join(alarmed))
logging.warn(msg)
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
self.onStateChange(s7conn, isArmed)
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
self.onTriggered(s7conn)
self._wasTriggered = isTriggered
|
<commit_before>import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
logging.warn("Alarm triggered")
# Notify
self._wasTriggered = isTriggered
<commit_msg>Move the notification code to separate methods
Dealing with state changes is likely to become a significant bit of
code, so move it out into separate methods.
Right now we only log these events, but at least the 'alarm becomes
triggered' one is going to need to do active notification to the user(s).<commit_after>import domotica.alarm as alarm
import logging
from poller import Poller
import s7
class AlarmPoller(Poller):
def __init__(self):
self._wasArmed = None
self._wasTriggered = None
def onStateChange(self, s7conn, isArmed):
if isArmed:
logging.info("Alarm activated")
else:
logging.info("Alarm deactivated")
def onTriggered(self, s7conn):
alarmed = [ ]
for detector in alarm.getDetectors(s7conn):
if detector.isTriggered():
alarmed.append(detector.getName())
if not alarmed:
logging.error("Alarm is triggered, but no detector is active!")
msg = "Alarm! Detectie in %s." % (", ".join(alarmed))
logging.warn(msg)
def poll(self, s7conn):
a = alarm.Alarm(s7conn)
isArmed = a.isArmed()
logging.debug("Alarm armed status: %s" % isArmed)
if self._wasArmed is not None and self._wasArmed != isArmed:
self.onStateChange(s7conn, isArmed)
self._wasArmed = isArmed
if not isArmed:
self._wasTriggered = False
return
isTriggered = a.isAlarmTriggered()
logging.debug("Alarm trigger status: %s" % isTriggered)
if self._wasTriggered is not None and isTriggered \
and self._wasTriggered != isTriggered:
self.onTriggered(s7conn)
self._wasTriggered = isTriggered
|
f0e71bdeca1a553c05228b57366a46c25db3d632
|
threema/gateway/util.py
|
threema/gateway/util.py
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
"""
Decode a hex-encoded key or read it from a file.
Arguments:
- `key`: A hex-encoded key or the name of a file which contains
a key.
- `expected_type`: One of the types of :class:`Key.Type`.
Return a:class:`libnacl.public.SecretKey` or
:class:`libnacl.public.PublicKey` instance.
"""
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
Add missing docstring for read_key_or_key_file
|
Add missing docstring for read_key_or_key_file
|
Python
|
mit
|
lgrahl/threema-msgapi-sdk-python,threema-ch/threema-msgapi-sdk-python
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
Add missing docstring for read_key_or_key_file
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
"""
Decode a hex-encoded key or read it from a file.
Arguments:
- `key`: A hex-encoded key or the name of a file which contains
a key.
- `expected_type`: One of the types of :class:`Key.Type`.
Return a:class:`libnacl.public.SecretKey` or
:class:`libnacl.public.PublicKey` instance.
"""
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
<commit_before>"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
<commit_msg>Add missing docstring for read_key_or_key_file<commit_after>
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
"""
Decode a hex-encoded key or read it from a file.
Arguments:
- `key`: A hex-encoded key or the name of a file which contains
a key.
- `expected_type`: One of the types of :class:`Key.Type`.
Return a:class:`libnacl.public.SecretKey` or
:class:`libnacl.public.PublicKey` instance.
"""
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
Add missing docstring for read_key_or_key_file"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
"""
Decode a hex-encoded key or read it from a file.
Arguments:
- `key`: A hex-encoded key or the name of a file which contains
a key.
- `expected_type`: One of the types of :class:`Key.Type`.
Return a:class:`libnacl.public.SecretKey` or
:class:`libnacl.public.PublicKey` instance.
"""
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
<commit_before>"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
<commit_msg>Add missing docstring for read_key_or_key_file<commit_after>"""
Utility functions.
"""
from threema.gateway.key import Key
__all__ = ('read_key_or_key_file',)
def read_key_or_key_file(key, expected_type):
"""
Decode a hex-encoded key or read it from a file.
Arguments:
- `key`: A hex-encoded key or the name of a file which contains
a key.
- `expected_type`: One of the types of :class:`Key.Type`.
Return a:class:`libnacl.public.SecretKey` or
:class:`libnacl.public.PublicKey` instance.
"""
# Read key file (if any)
try:
with open(key) as file:
key = file.readline().strip()
except IOError:
pass
# Convert to key instance
return Key.decode(key, expected_type)
|
fcb86792af4738ade1422f996397d8b96f0c54c5
|
scripts/mc_add_observation.py
|
scripts/mc_add_observation.py
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import os
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uv files to extract')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uvh5 files to add')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
Fix flake8 issue and fix up documentation
|
Fix flake8 issue and fix up documentation
|
Python
|
bsd-2-clause
|
HERA-Team/hera_mc,HERA-Team/hera_mc,HERA-Team/Monitor_and_Control
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import os
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uv files to extract')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
Fix flake8 issue and fix up documentation
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uvh5 files to add')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
<commit_before>#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import os
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uv files to extract')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
<commit_msg>Fix flake8 issue and fix up documentation<commit_after>
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uvh5 files to add')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import os
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uv files to extract')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
Fix flake8 issue and fix up documentation#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uvh5 files to add')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
<commit_before>#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import os
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uv files to extract')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
<commit_msg>Fix flake8 issue and fix up documentation<commit_after>#! /usr/bin/env python
# -*- mode: python; coding: utf-8 -*-
# Copyright 2017 the HERA Collaboration
# Licensed under the 2-clause BSD license.
import numpy as np
from astropy.time import Time
from pyuvdata import UVData
from hera_mc import mc
a = mc.get_mc_argument_parser()
a.description = """Read the obsid from a file and create a record in M&C."""
a.add_argument('files', metavar='file', type=str, nargs='*', default=[],
help='*.uvh5 files to add')
args = a.parse_args()
db = mc.connect_to_mc_db(args)
for uvfile in args.files:
# assume our data file is uvh5
uv = UVData()
uv.read_uvh5(uvfile, read_data=False)
times = np.unique(uv.time_array)
starttime = Time(times[0], scale='utc', format='jd')
stoptime = Time(times[-1], scale='utc', format='jd')
obsid = int(np.floor(starttime.gps))
with db.sessionmaker() as session:
obs = session.get_obs(obsid)
if len(obs) > 0:
print("observation {obs} already in M&C, skipping".format(obs=obsid))
continue
print("Inserting obsid into M&C:" + str(obsid))
session.add_obs(starttime, stoptime, obsid)
session.commit()
|
70441c75eacd3e71c5e3a0f4db1cc0712729e50f
|
Python/pizza/pizza_roulette.py
|
Python/pizza/pizza_roulette.py
|
#!/usr/bin/env python
import codecs
import random
import os
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
#!/usr/bin/env python
import codecs
import random
import os
import sys
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
if "meat" in sys.argv :
filename = dirname + "/meat"
with open(filename) as ingredients:
content = content + ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
Add option to get meat
|
Add option to get meat
|
Python
|
mit
|
hjorthjort/scripts,hjorthjort/scripts
|
#!/usr/bin/env python
import codecs
import random
import os
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
Add option to get meat
|
#!/usr/bin/env python
import codecs
import random
import os
import sys
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
if "meat" in sys.argv :
filename = dirname + "/meat"
with open(filename) as ingredients:
content = content + ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
<commit_before>#!/usr/bin/env python
import codecs
import random
import os
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
<commit_msg>Add option to get meat<commit_after>
|
#!/usr/bin/env python
import codecs
import random
import os
import sys
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
if "meat" in sys.argv :
filename = dirname + "/meat"
with open(filename) as ingredients:
content = content + ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
#!/usr/bin/env python
import codecs
import random
import os
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
Add option to get meat#!/usr/bin/env python
import codecs
import random
import os
import sys
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
if "meat" in sys.argv :
filename = dirname + "/meat"
with open(filename) as ingredients:
content = content + ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
<commit_before>#!/usr/bin/env python
import codecs
import random
import os
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
<commit_msg>Add option to get meat<commit_after>#!/usr/bin/env python
import codecs
import random
import os
import sys
dirname = os.path.dirname(os.path.realpath(__file__))
MIN_INGRED = 2
MAX_INGRED = 8
filename = dirname + "/vegetarian"
with open(filename) as ingredients:
content = ingredients.read().splitlines()
if "meat" in sys.argv :
filename = dirname + "/meat"
with open(filename) as ingredients:
content = content + ingredients.read().splitlines()
roulette_result = []
roulette_tries = random.randint(2,8)
already_selected = []
for i in range(0, roulette_tries):
select = random.randint(0, len(content) -1)
while select in already_selected:
select = random.randint(0, len(content) -1)
already_selected.append(select)
roulette_result.append(content[select])
for string in roulette_result:
print string.decode('utf-8', 'ignore')
|
70a997c2991ea306a40054be8e2e93361ef9c702
|
src/globus_sdk/services/gcs/errors.py
|
src/globus_sdk/services/gcs/errors.py
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if self.detail and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if isinstance(self.detail, dict) and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
Add a missing isinstance check to pacify pyright
|
Add a missing isinstance check to pacify pyright
pyright (correctly) complains that we use `detail["DATA_TYPE"]` in GCS
error parsing without knowing that `detail` is a type which supports
strings in `__getitem__`.
Check if `detail` is a dict before indexing into it.
|
Python
|
apache-2.0
|
globus/globus-sdk-python,globus/globus-sdk-python,sirosen/globus-sdk-python
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if self.detail and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
Add a missing isinstance check to pacify pyright
pyright (correctly) complains that we use `detail["DATA_TYPE"]` in GCS
error parsing without knowing that `detail` is a type which supports
strings in `__getitem__`.
Check if `detail` is a dict before indexing into it.
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if isinstance(self.detail, dict) and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
<commit_before>from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if self.detail and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
<commit_msg>Add a missing isinstance check to pacify pyright
pyright (correctly) complains that we use `detail["DATA_TYPE"]` in GCS
error parsing without knowing that `detail` is a type which supports
strings in `__getitem__`.
Check if `detail` is a dict before indexing into it.<commit_after>
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if isinstance(self.detail, dict) and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if self.detail and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
Add a missing isinstance check to pacify pyright
pyright (correctly) complains that we use `detail["DATA_TYPE"]` in GCS
error parsing without knowing that `detail` is a type which supports
strings in `__getitem__`.
Check if `detail` is a dict before indexing into it.from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if isinstance(self.detail, dict) and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
<commit_before>from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if self.detail and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
<commit_msg>Add a missing isinstance check to pacify pyright
pyright (correctly) complains that we use `detail["DATA_TYPE"]` in GCS
error parsing without knowing that `detail` is a type which supports
strings in `__getitem__`.
Check if `detail` is a dict before indexing into it.<commit_after>from typing import Any, List, Optional, Union
import requests
from globus_sdk import exc
class GCSAPIError(exc.GlobusAPIError):
"""
Error class for the GCS Manager API client
"""
def __init__(self, r: requests.Response) -> None:
self.detail_data_type: Optional[str] = None
self.detail: Union[None, str, dict] = None
super().__init__(r)
def _get_args(self) -> List[Any]:
args = super()._get_args()
args.append(self.detail_data_type)
# only add detail if it's a string (don't want to put a large object into
# stacktraces)
if isinstance(self.detail, str):
args.append(self.detail)
return args
def _load_from_json(self, data: dict) -> None:
super()._load_from_json(data)
# detail can be a full document, so fetch, then look for a DATA_TYPE
# and expose it as a top-level attribute for easy access
self.detail = data.get("detail")
if isinstance(self.detail, dict) and "DATA_TYPE" in self.detail:
self.detail_data_type = self.detail["DATA_TYPE"]
|
13ffdb0cb455bf32a10d055e6e972c0ca725557a
|
src/mmw/apps/home/views.py
|
src/mmw/apps/home/views.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
Return a csrf token on the homepage.
|
Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediatley as part of the cookie. Ajax calls can then use this value.
|
Python
|
apache-2.0
|
WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,kdeloach/model-my-watershed,mmcfarland/model-my-watershed-1,lewfish/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,lewfish/model-my-watershed,mmcfarland/model-my-watershed-1,lewfish/model-my-watershed,WikiWatershed/model-my-watershed,lewfish/model-my-watershed,lewfish/model-my-watershed,lliss/model-my-watershed,mmcfarland/model-my-watershed-1,lliss/model-my-watershed,mmcfarland/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,WikiWatershed/model-my-watershed,mmcfarland/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed,lliss/model-my-watershed,mmcfarland/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed-1,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,project-icp/bee-pollinator-app,mmcfarland/model-my-watershed,kdeloach/model-my-watershed
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediatley as part of the cookie. Ajax calls can then use this value.
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
<commit_msg>Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediatley as part of the cookie. Ajax calls can then use this value.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediately as part of the cookie. Ajax calls can then use this value.# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
return render_to_response('home/home.html')
def compare(request):
return render_to_response('home/compare.html')
<commit_msg>Return a csrf token on the homepage.
We were not setting a CSRF token on the homepage. This meant that requests to
API endpoints did not have a token available. This change sets the token
immediately as part of the cookie. Ajax calls can then use this value.<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.template.context_processors import csrf
from rest_framework import serializers, viewsets
# Serializers define the API representation.
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
# ViewSets define the view behavior.
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
def home_page(request):
csrf_token = {}
csrf_token.update(csrf(request))
return render_to_response('home/home.html', csrf_token)
def compare(request):
return render_to_response('home/compare.html')
|
a1a312dc71cd7b32a0d39f85a9b2fe45ee57892c
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[ "requests", ],
)
|
Add dependency on requests module.
|
Add dependency on requests module.
|
Python
|
mit
|
grundleborg/pykismet3
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add dependency on requests module.
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[ "requests", ],
)
|
<commit_before>import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add dependency on requests module.<commit_after>
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[ "requests", ],
)
|
import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add dependency on requests module.import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[ "requests", ],
)
|
<commit_before>import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add dependency on requests module.<commit_after>import os
from setuptools import setup
def read(*paths):
"""Build a file path from *paths* and return the contents."""
with open(os.path.join(*paths), 'r') as f:
return f.read()
setup(
name='pykismet3',
version='0.1.0',
description='A Python 3 module for the Akismet spam comment-spam-detection web service.',
long_description=(read('README.md')),
url='https://github.com/grundleborg/pykismet',
license='MIT',
author='George Goldberg',
author_email='george@grundleborg.com',
py_modules=['pykismet3'],
include_package_data=True,
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=[ "requests", ],
)
|
14ea1e8ade33d42497d6ca9d11ca9b1c2b00614b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
],
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
author=META['author'],
author_email=META['email']
)
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
'epp-proxyd = neuemux.proxyd:main',
'epp-muxd = neuemux.muxd:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: System :: Networking',
],
author=META['author'],
author_email=META['email']
)
|
Add missing entry points and classifiers.
|
Add missing entry points and classifiers.
|
Python
|
mit
|
kgaughan/neuemux
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
],
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
author=META['author'],
author_email=META['email']
)
Add missing entry points and classifiers.
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
'epp-proxyd = neuemux.proxyd:main',
'epp-muxd = neuemux.muxd:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: System :: Networking',
],
author=META['author'],
author_email=META['email']
)
|
<commit_before>#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
],
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
author=META['author'],
author_email=META['email']
)
<commit_msg>Add missing entry points and classifiers.<commit_after>
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
'epp-proxyd = neuemux.proxyd:main',
'epp-muxd = neuemux.muxd:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: System :: Networking',
],
author=META['author'],
author_email=META['email']
)
|
#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
],
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
author=META['author'],
author_email=META['email']
)
Add missing entry points and classifiers.#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
'epp-proxyd = neuemux.proxyd:main',
'epp-muxd = neuemux.muxd:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: System :: Networking',
],
author=META['author'],
author_email=META['email']
)
|
<commit_before>#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
],
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
author=META['author'],
author_email=META['email']
)
<commit_msg>Add missing entry points and classifiers.<commit_after>#!/usr/bin/env python
from __future__ import with_statement
from setuptools import setup, find_packages
from buildkit import *
META = get_metadata('neuemux/version.py')
setup(
name='neuemux',
version=META['version'],
description='EPP reverse proxy daemons',
long_description=read('README'),
url='https://github.com/kgaughan/neuemux/',
license='MIT',
packages=find_packages(exclude='tests'),
zip_safe=False,
install_requires=read_requirements('requirements.txt'),
include_package_data=True,
entry_points={
'console_scripts': [
'epp-proxyd = neuemux.proxyd:main',
'epp-muxd = neuemux.muxd:main',
],
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet',
'Topic :: System :: Networking',
],
author=META['author'],
author_email=META['email']
)
|
247419dce957ad1fa8c1c61a0e13d857b0d8f038
|
setup.py
|
setup.py
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse.py',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
Change PyPi name to giturlparse.py
|
Change PyPi name to giturlparse.py
|
Python
|
apache-2.0
|
FriendCode/giturlparse.py,yakky/giturlparse.py,yakky/giturlparse
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
Change PyPi name to giturlparse.py
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse.py',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
<commit_before>#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
<commit_msg>Change PyPi name to giturlparse.py<commit_after>
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse.py',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
Change PyPi name to giturlparse.py#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse.py',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
<commit_before>#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
<commit_msg>Change PyPi name to giturlparse.py<commit_after>#!/usr/bin/python
try:
from setuptools import setup, Extension
has_setuptools = True
except ImportError:
from distutils.core import setup, Extension
has_setuptools = False
version_string = '0.0.1'
setup_kwargs = {}
# Requirements
install_requires = [
# PyPi
# Non PyPi
]
dependency_links = [
]
setup(name='giturlparse.py',
description='A Git URL parsing module (supports parsing and rewriting)',
keywords='git url parse ssh github bitbucket',
version=version_string,
url='https://github.com/FriendCode/giturlparse.py',
license='Apache v2',
author="Aaron O'Mullan",
author_email='aaron@friendco.de',
long_description="""
""",
packages=['giturlparse', 'giturlparse.platforms'],
install_requires=install_requires,
dependency_links=dependency_links,
**setup_kwargs
)
|
ed731480d0266b0158232bd1c7acda97d4f43ba7
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
)
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
install_requires=(
'Django>=1.8',
),
)
|
Update Django requirement to latest LTS
|
Update Django requirement to latest LTS
|
Python
|
bsd-3-clause
|
lamby/django-directed-edge
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
)
Update Django requirement to latest LTS
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
install_requires=(
'Django>=1.8',
),
)
|
<commit_before>from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
)
<commit_msg>Update Django requirement to latest LTS<commit_after>
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
install_requires=(
'Django>=1.8',
),
)
|
from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
)
Update Django requirement to latest LTSfrom setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
install_requires=(
'Django>=1.8',
),
)
|
<commit_before>from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
)
<commit_msg>Update Django requirement to latest LTS<commit_after>from setuptools import setup
setup(
name='django-directed-edge',
version='2.0.1',
packages=(
'django_directed_edge',
),
url='https://chris-lamb.co.uk/projects/django-directed-edge',
author="Chris Lamb",
author_email="chris@chris-lamb.co.uk",
description="Helpful Django-oriented sugar around around DirectedEdge's Python API",
install_requires=(
'Django>=1.8',
),
)
|
b887ce4b8ffdd25f4a8d3dc4f81bc7fa340272ae
|
setup.py
|
setup.py
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: https://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
Fix a https url issue
|
Fix a https url issue
Change-Id: I2d2794209620c823f0aef9549f8ee43aa4c91dff
|
Python
|
apache-2.0
|
openstack/senlin,openstack/senlin,openstack/senlin,stackforge/senlin,stackforge/senlin
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
Fix a https url issue
Change-Id: I2d2794209620c823f0aef9549f8ee43aa4c91dff
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: https://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
<commit_before># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
<commit_msg>Fix a https url issue
Change-Id: I2d2794209620c823f0aef9549f8ee43aa4c91dff<commit_after>
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: https://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
Fix a https url issue
Change-Id: I2d2794209620c823f0aef9549f8ee43aa4c91dff# Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: https://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
<commit_before># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: http://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
<commit_msg>Fix a https url issue
Change-Id: I2d2794209620c823f0aef9549f8ee43aa4c91dff<commit_after># Copyright (c) 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT
import setuptools
# In python < 2.7.4, a lazy loading of package `pbr` will break
# setuptools if some other modules registered functions in `atexit`.
# solution from: https://bugs.python.org/issue15881#msg170215
try:
import multiprocessing # noqa
except ImportError:
pass
setuptools.setup(
setup_requires=['pbr>=2.0.0'],
pbr=True)
|
6fd16f646712d3648b52c7b1c3ca3380e29d87fd
|
setup.py
|
setup.py
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7.1',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
package_data={'hxl': ['*.csv']},
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
Make sure we install the csv file, still.
|
Make sure we install the csv file, still.
|
Python
|
unlicense
|
HXLStandard/libhxl-python,HXLStandard/libhxl-python
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
Make sure we install the csv file, still.
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7.1',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
package_data={'hxl': ['*.csv']},
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
<commit_msg>Make sure we install the csv file, still.<commit_after>
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7.1',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
package_data={'hxl': ['*.csv']},
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
Make sure we install the csv file, still.#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7.1',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
package_data={'hxl': ['*.csv']},
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
<commit_before>#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
<commit_msg>Make sure we install the csv file, still.<commit_after>#!/usr/bin/python
from setuptools import setup
setup(name='libhxl',
version='2.7.1',
description='Python support for the Humanitarian Exchange Language (HXL).',
author='David Megginson',
author_email='contact@megginson.com',
url='http://hxlproject.org',
install_requires=['python-dateutil', 'xlrd', 'requests', 'unidecode'],
packages=['hxl'],
package_data={'hxl': ['*.csv']},
include_package_data=True,
test_suite='tests',
entry_points={
'console_scripts': [
'hxladd = hxl.scripts:hxladd',
'hxlappend = hxl.scripts:hxlappend',
'hxlclean = hxl.scripts:hxlclean',
'hxlcount = hxl.scripts:hxlcount',
'hxlcut = hxl.scripts:hxlcut',
'hxldedup = hxl.scripts:hxldedup',
'hxlmerge = hxl.scripts:hxlmerge',
'hxlrename = hxl.scripts:hxlrename',
'hxlreplace = hxl.scripts:hxlreplace',
'hxlselect = hxl.scripts:hxlselect',
'hxlsort = hxl.scripts:hxlsort',
'hxltag = hxl.scripts:hxltag',
'hxlvalidate = hxl.scripts:hxlvalidate'
]
}
)
|
0f881802b7ce7e19a16b70b88b480eeb30a0affd
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'",
"six==1.12.0"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
Install six for Python 2 & 3 compatibility
|
Install six for Python 2 & 3 compatibility
|
Python
|
mit
|
mineo/sagbescheid,mineo/sagbescheid
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
Install six for Python 2 & 3 compatibility
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'",
"six==1.12.0"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
<commit_msg>Install six for Python 2 & 3 compatibility<commit_after>
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'",
"six==1.12.0"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
Install six for Python 2 & 3 compatibility#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'",
"six==1.12.0"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
<commit_before>#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
<commit_msg>Install six for Python 2 & 3 compatibility<commit_after>#!/usr/bin/env python2
from setuptools import setup
setup(name="sagbescheid",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["sagbescheid", "sagbescheid.notifiers"],
package_dir={"sagbescheid": "sagbescheid"},
download_url="https://github.com/mineo/sagbescheid/tarball/master",
url="http://github.com/mineo/sagbescheid",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7"],
description="systemd notification daemon",
long_description=open("README.rst").read(),
install_requires=["Twisted[tls]>=15.2.0",
"pyasn1",
"txdbus==1.1.0",
"automat==0.7.0",
"zope.interface==4.6.0",
"systemd-python==234",
"enum34==1.1.6;python_version<'3.4'",
"six==1.12.0"],
setup_requires=["setuptools_scm"],
use_scm_version={"write_to": "sagbescheid/version.py"},
extras_require={
'docs': ['sphinx', 'sphinxcontrib-autoprogram']
}
)
|
a5b7416eed78cb708b1139f1e21d0b193d8a0623
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='MaxwellBloch',
version='0.3.0.dev',
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
from setuptools import setup
import subprocess
# Semantic versioning
MAJOR = 0
MINOR = 3
PATCH = 0
IS_RELEASED = False
VERSION = '{0}.{1}.{2}'.format(MAJOR, MINOR, PATCH)
def git_short_hash():
""" Returns the short hash of the latest git commit as a string. """
git_str = subprocess.check_output(['git', 'log', '-1',
'--format=%h']).decode('UTF-8').strip()
return git_str
FULL_VERSION = VERSION
if not IS_RELEASED:
FULL_VERSION += '+' + git_short_hash()
setup(name='MaxwellBloch',
version=FULL_VERSION,
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
Add git short hash to unreleased version numbers
|
Add git short hash to unreleased version numbers
|
Python
|
mit
|
tommyogden/maxwellbloch,tommyogden/maxwellbloch
|
from setuptools import setup
setup(name='MaxwellBloch',
version='0.3.0.dev',
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
Add git short hash to unreleased version numbers
|
from setuptools import setup
import subprocess
# Semantic versioning
MAJOR = 0
MINOR = 3
PATCH = 0
IS_RELEASED = False
VERSION = '{0}.{1}.{2}'.format(MAJOR, MINOR, PATCH)
def git_short_hash():
""" Returns the short hash of the latest git commit as a string. """
git_str = subprocess.check_output(['git', 'log', '-1',
'--format=%h']).decode('UTF-8').strip()
return git_str
FULL_VERSION = VERSION
if not IS_RELEASED:
FULL_VERSION += '+' + git_short_hash()
setup(name='MaxwellBloch',
version=FULL_VERSION,
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='MaxwellBloch',
version='0.3.0.dev',
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
<commit_msg>Add git short hash to unreleased version numbers<commit_after>
|
from setuptools import setup
import subprocess
# Semantic versioning
MAJOR = 0
MINOR = 3
PATCH = 0
IS_RELEASED = False
VERSION = '{0}.{1}.{2}'.format(MAJOR, MINOR, PATCH)
def git_short_hash():
""" Returns the short hash of the latest git commit as a string. """
git_str = subprocess.check_output(['git', 'log', '-1',
'--format=%h']).decode('UTF-8').strip()
return git_str
FULL_VERSION = VERSION
if not IS_RELEASED:
FULL_VERSION += '+' + git_short_hash()
setup(name='MaxwellBloch',
version=FULL_VERSION,
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
from setuptools import setup
setup(name='MaxwellBloch',
version='0.3.0.dev',
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
Add git short hash to unreleased version numbersfrom setuptools import setup
import subprocess
# Semantic versioning
MAJOR = 0
MINOR = 3
PATCH = 0
IS_RELEASED = False
VERSION = '{0}.{1}.{2}'.format(MAJOR, MINOR, PATCH)
def git_short_hash():
""" Returns the short hash of the latest git commit as a string. """
git_str = subprocess.check_output(['git', 'log', '-1',
'--format=%h']).decode('UTF-8').strip()
return git_str
FULL_VERSION = VERSION
if not IS_RELEASED:
FULL_VERSION += '+' + git_short_hash()
setup(name='MaxwellBloch',
version=FULL_VERSION,
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
<commit_before>from setuptools import setup
setup(name='MaxwellBloch',
version='0.3.0.dev',
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
<commit_msg>Add git short hash to unreleased version numbers<commit_after>from setuptools import setup
import subprocess
# Semantic versioning
MAJOR = 0
MINOR = 3
PATCH = 0
IS_RELEASED = False
VERSION = '{0}.{1}.{2}'.format(MAJOR, MINOR, PATCH)
def git_short_hash():
""" Returns the short hash of the latest git commit as a string. """
git_str = subprocess.check_output(['git', 'log', '-1',
'--format=%h']).decode('UTF-8').strip()
return git_str
FULL_VERSION = VERSION
if not IS_RELEASED:
FULL_VERSION += '+' + git_short_hash()
setup(name='MaxwellBloch',
version=FULL_VERSION,
description='A Python package for solving the Maxwell-Bloch equations.',
url='http://github.com/tommyogden/maxwellbloch',
author='Thomas P Ogden',
author_email='t@ogden.eu',
license='MIT',
packages=['maxwellbloch'],
install_requires=['qutip'],
scripts=['bin/make-mp4-fixed-frame.py',
'bin/make-gif-ffmpeg.sh'],
zip_safe=False)
|
80e0b7eb034794120518f29932ab23c67047559a
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a9',
'Beaker>=1.8.1',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
Upgrade Beaker 1.6.4 => 1.8.1
|
Upgrade Beaker 1.6.4 => 1.8.1
|
Python
|
mit
|
TangledWeb/tangled.session
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade Beaker 1.6.4 => 1.8.1
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a9',
'Beaker>=1.8.1',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade Beaker 1.6.4 => 1.8.1<commit_after>
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a9',
'Beaker>=1.8.1',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
Upgrade Beaker 1.6.4 => 1.8.1from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a9',
'Beaker>=1.8.1',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
<commit_before>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'Beaker>=1.6.4',
'tangled>=0.1a9',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
<commit_msg>Upgrade Beaker 1.6.4 => 1.8.1<commit_after>from setuptools import setup
setup(
name='tangled.session',
version='0.1a3.dev0',
description='Tangled session integration',
long_description=open('README.rst').read(),
url='http://tangledframework.org/',
download_url='https://github.com/TangledWeb/tangled.session/tags',
author='Wyatt Baldwin',
author_email='self@wyattbaldwin.com',
packages=[
'tangled',
'tangled.session',
'tangled.session.tests',
],
install_requires=[
'tangled>=0.1a9',
'Beaker>=1.8.1',
],
extras_require={
'dev': [
'tangled[dev]>=0.1a9',
],
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
)
|
006380832d1f45c6c1c4ffad9356e7ed2399d681
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
long_description_content_type='text/x-rst',
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
Add a content type description to keep twine happy.
|
Add a content type description to keep twine happy.
|
Python
|
bsd-3-clause
|
freakboy3742/pyspamsum,freakboy3742/pyspamsum
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
Add a content type description to keep twine happy.
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
long_description_content_type='text/x-rst',
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
<commit_before>#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
<commit_msg>Add a content type description to keep twine happy.<commit_after>
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
long_description_content_type='text/x-rst',
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
Add a content type description to keep twine happy.#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
long_description_content_type='text/x-rst',
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
<commit_before>#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
<commit_msg>Add a content type description to keep twine happy.<commit_after>#!/usr/bin/env python
import io
from setuptools import setup, Extension
with io.open('README.rst', encoding='utf8') as readme:
long_description = readme.read()
setup(
name="pyspamsum",
version="1.0.5",
description="A Python wrapper for Andrew Tridgell's spamsum algorithm",
long_description=long_description,
long_description_content_type='text/x-rst',
author="Russell Keith-Magee",
author_email="russell@keith-magee.com",
url='http://github.com/freakboy3742/pyspamsum/',
license="New BSD",
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Text Processing',
'Topic :: Utilities',
],
ext_modules=[
Extension(
"spamsum", [
"pyspamsum.c",
"spamsum.c",
"edit_dist.c",
]
)
],
test_suite='tests',
)
|
650d6c72054ac5cd406af8a37e45d0349cf521dc
|
students/psbriant/final_project/test_clean_data.py
|
students/psbriant/final_project/test_clean_data.py
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data.py as cd
import io
|
Add organizational structure and import calls to clean_data.py and io.
|
Add organizational structure and import calls to clean_data.py and io.
|
Python
|
unlicense
|
weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016
|
Add organizational structure and import calls to clean_data.py and io.
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data.py as cd
import io
|
<commit_before><commit_msg>Add organizational structure and import calls to clean_data.py and io.<commit_after>
|
"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data.py as cd
import io
|
Add organizational structure and import calls to clean_data.py and io."""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data.py as cd
import io
|
<commit_before><commit_msg>Add organizational structure and import calls to clean_data.py and io.<commit_after>"""
Name: Paul Briant
Date: 12/11/16
Class: Introduction to Python
Assignment: Final Project
Description:
Tests for Final Project
"""
import clean_data.py as cd
import io
|
|
276f22927890051f66976468585d8351c0ccf5b9
|
sum-of-multiples/sum_of_multiples.py
|
sum-of-multiples/sum_of_multiples.py
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
Use more optimal method of getting multiples
|
Use more optimal method of getting multiples
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
Use more optimal method of getting multiples
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
<commit_before>def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
<commit_msg>Use more optimal method of getting multiples<commit_after>
|
def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
Use more optimal method of getting multiplesdef sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
<commit_before>def sum_of_multiples(limit, factors):
return sum(filter(lambda n: n < limit,
{f*i for i in range(1, limit) for f in factors}))
<commit_msg>Use more optimal method of getting multiples<commit_after>def sum_of_multiples(limit, factors):
return sum({n for f in factors for n in range(f, limit, f)})
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.