field             type            range
commit            stringlengths   40 to 40
old_file          stringlengths   4 to 118
new_file          stringlengths   4 to 118
old_contents      stringlengths   0 to 2.94k
new_contents      stringlengths   1 to 4.43k
subject           stringlengths   15 to 444
message           stringlengths   16 to 3.45k
lang              stringclasses   1 value
license           stringclasses   13 values
repos             stringlengths   5 to 43.2k
prompt            stringlengths   17 to 4.58k
response          stringlengths   1 to 4.43k
prompt_tagged     stringlengths   58 to 4.62k
response_tagged   stringlengths   1 to 4.43k
text              stringlengths   132 to 7.29k
text_tagged       stringlengths   173 to 7.33k

commit: 37b16cea115419d1353cf1213698fc4a0d229fa7
old_file: warehouse/helpers.py
new_file: warehouse/helpers.py

old_contents:

# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals


def url_for(request, endpoint, **values):
    return request.url_adapter.build(endpoint, values)

new_contents:

# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals


def url_for(request, endpoint, **values):
    force_external = values.pop("_force_external", False)
    return request.url_adapter.build(
        endpoint, values,
        force_external=force_external,
    )

subject: Make it possible to force an external url with the url_for helper
message: Make it possible to force an external url with the url_for helper
lang: Python
license: apache-2.0
repos: robhudson/warehouse,mattrobenolt/warehouse,techtonik/warehouse,mattrobenolt/warehouse,robhudson/warehouse,techtonik/warehouse,mattrobenolt/warehouse

commit: 27e2cc73f43c5ca8eedee52009652b6195e76198
old_file: tests/py/test_notifications.py
new_file: tests/py/test_notifications.py

old_contents:

from gratipay.testing import Harness


class TestNotifications(Harness):

    def test_add_single_notification(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        assert alice.notifications == ["abcd"]

    def test_add_multiple_notifications(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('1234')
        assert alice.notifications == ["abcd", "1234"]

    def test_add_same_notification_twice(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('abcd')
        assert alice.notifications == ["abcd"]

    def test_remove_notification(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('1234')
        alice.add_notification('bcde')
        alice.remove_notification('1234')
        assert alice.notifications == ["abcd", "bcde"]

new_contents:

from gratipay.testing import Harness


class TestNotifications(Harness):

    def test_add_single_notification(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        assert alice.notifications == ["abcd"]

    def test_add_multiple_notifications(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('1234')
        assert alice.notifications == ["abcd", "1234"]

    def test_add_same_notification_twice(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('abcd')
        assert alice.notifications == ["abcd"]

    def test_remove_notification(self):
        alice = self.make_participant('alice')
        alice.add_notification('abcd')
        alice.add_notification('1234')
        alice.add_notification('bcde')
        alice.remove_notification('1234')
        assert alice.notifications == ["abcd", "bcde"]

    def test_blog_announcement(self):
        assert 'blog/39c07bd031b">indefinitely' in self.client.GET('/').body

subject: Add a test for the blog announcement
message: Add a test for the blog announcement
lang: Python
license: mit
repos: gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com

commit: e9467251acfdbceb26c158636d3befe4428ed88b
old_file: coda/coda_replication/factories.py
new_file: coda/coda_replication/factories.py

old_contents:

"""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime

import factory
from factory import fuzzy

from . import models


class QueueEntryFactory(factory.django.DjangoModelFactory):
    ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
    bytes = fuzzy.FuzzyInteger(100000000)
    files = fuzzy.FuzzyInteger(50, 500)
    url_list = fuzzy.FuzzyText(length=500)
    status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
    harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01))
    harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01))
    queue_position = fuzzy.FuzzyInteger(1, 100)

    class Meta:
        model = models.QueueEntry

new_contents:

"""
Coda Replication Model factories for test fixtures.
"""
from datetime import datetime

import factory
from factory import fuzzy

from . import models


class QueueEntryFactory(factory.django.DjangoModelFactory):
    ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n))
    bytes = fuzzy.FuzzyInteger(100000000)
    files = fuzzy.FuzzyInteger(50, 500)
    url_list = fuzzy.FuzzyText(length=500)
    status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10))
    harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1))
    harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1))
    queue_position = fuzzy.FuzzyInteger(1, 100)

    class Meta:
        model = models.QueueEntry

subject: Update dates passed in python3
message: Update dates passed in python3
lang: Python
license: bsd-3-clause
repos: unt-libraries/coda,unt-libraries/coda,unt-libraries/coda,unt-libraries/coda
""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry Update dates passed in python3
""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry
<commit_before>""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry <commit_msg>Update dates passed in python3<commit_after>
""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry
""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry Update dates passed in python3""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry
<commit_before>""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 01, 01)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 06, 01)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry <commit_msg>Update dates passed in python3<commit_after>""" Coda Replication Model factories for test fixtures. """ from datetime import datetime import factory from factory import fuzzy from . import models class QueueEntryFactory(factory.django.DjangoModelFactory): ark = factory.Sequence(lambda n: 'ark:/00001/id{0}'.format(n)) bytes = fuzzy.FuzzyInteger(100000000) files = fuzzy.FuzzyInteger(50, 500) url_list = fuzzy.FuzzyText(length=500) status = fuzzy.FuzzyChoice(str(i) for i in range(1, 10)) harvest_start = fuzzy.FuzzyNaiveDateTime(datetime(2015, 1, 1)) harvest_end = fuzzy.FuzzyNaiveDateTime(datetime(2015, 6, 1)) queue_position = fuzzy.FuzzyInteger(1, 100) class Meta: model = models.QueueEntry

commit: b72bea3f6970a095864ec564008f5542dc88eeca
old_file: tests/test_vector2_equality.py
new_file: tests/test_vector2_equality.py

old_contents:

from hypothesis import assume, given

from ppb_vector import Vector2
from utils import vectors


@given(x=vectors())
def test_equal_self(x: Vector2):
    assert x == x


@given(x=vectors())
def test_non_zero_equal(x: Vector2):
    assume(x != (0, 0))
    assert x != 1.1 * x
    assert x != -x


@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
    assert (x != y) == (not x == y)

new_contents:

from hypothesis import assume, given

from ppb_vector import Vector2
from utils import vectors, vector_likes


@given(x=vectors())
def test_equal_self(x: Vector2):
    assert x == x


@given(x=vectors(), y=vectors())
def test_equal_symmetric(x: Vector2, y):
    assert (x == y) == (y == x)
    for y_like in vector_likes(y):
        assert (x == y_like) == (y_like == x)


@given(x=vectors())
def test_non_zero_equal(x: Vector2):
    assume(x != (0, 0))
    assert x != 1.1 * x
    assert x != -x


@given(x=vectors(), y=vectors())
def test_not_equal_equivalent(x: Vector2, y: Vector2):
    assert (x != y) == (not x == y)

subject: Test symmetry of equality, even with vector-likes
message: tests/equality: Test symmetry of equality, even with vector-likes
lang: Python
license: artistic-2.0
repos: ppb/ppb-vector,ppb/ppb-vector

commit: b5fe71191bc7c39996d526132720a22c3967b1cf
old_file: canopus/schema/core.py
new_file: canopus/schema/core.py

old_contents:

from marshmallow import Schema, fields

from ..models.core import Post


class PostSchema(Schema):
    id = fields.Integer()
    title = fields.String()
    body = fields.String()
    is_published = fields.Boolean()

    def make_object(self, data):
        return Post(**data)

new_contents:

from marshmallow import Schema, fields, post_load

from ..models import Post


class PostSchema(Schema):
    __model__ = Post

    id = fields.Integer()
    title = fields.String()
    body = fields.String()
    is_published = fields.Boolean()

    class Meta:
        ordered = True

    @post_load
    def make_object(self, data):
        return self.__model__(**data)

subject: Fix post schema for latest marshmallow release
message: Fix post schema for latest marshmallow release
lang: Python
license: mit
repos: josuemontano/pyramid-angularjs-starter,josuemontano/pyramid-angularjs-starter,josuemontano/pyramid-angularjs-starter,josuemontano/API-platform,josuemontano/api-starter,josuemontano/API-platform,josuemontano/api-starter,josuemontano/api-starter,josuemontano/API-platform,josuemontano/API-platform

commit: b3028843fc9f799d3fe1f52fbd64bb843dcd6f75
old_file: picaxe/urls.py
new_file: picaxe/urls.py

old_contents:

from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.sites.models import Site

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'picaxe.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),
    url(r'^photologue/', include('photologue.urls', namespace='photologue')),
)

admin.site.unregister(Site)

new_contents:

from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.contrib.sites.models import Site

urlpatterns = patterns('',
    # Examples:
    # url(r'^$', 'picaxe.views.home', name='home'),
    # url(r'^blog/', include('blog.urls')),

    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('photologue.urls', namespace='photologue')),
)

admin.site.unregister(Site)

subject: Use photologue as default url
message: Use photologue as default url
lang: Python
license: mit
repos: TuinfeesT/PicAxe

commit: 6963c7c9651d6770c742c50f5fd0fbee68b4f66f
old_file: tests/test_rover_init.py
new_file: tests/test_rover_init.py

old_contents:

def test_rover_init_with_default_parameters():
    from rover import Rover
    rover = Rover()
    assert rover.x == 0
    assert rover.y == 0
    assert rover.direction == 'N'


def test_rover_init_with_custom_paramaters():
    from rover import Rover
    rover = Rover(3, 7, 'W')
    assert rover.x == 3
    assert rover.y == 7
    assert rover.direction == 'W'


def test_rover_Init_custom_grid():
    from rover import Rover
    rover = Rover(grid_x=100, grid_y=150)
    assert rover.x == 0
    assert rover.y == 0
    assert rover.direction == 'N'
    assert rover.grid_x == 100
    assert rover.grid_y == 150


def test_rover_init_full_custom_grid():
    from rover import Rover
    rover = Rover(5, 9, 'E', 100, 150)
    assert rover.x == 5
    assert rover.y == 9
    assert rover.direction == 'E'
    assert rover.grid_x == 100
    assert rover.grid_y == 150

new_contents:

def test_rover_init_with_default_parameters():
    from rover import Rover
    rover = Rover()
    assert rover.x == 0
    assert rover.y == 0
    assert rover.direction == 'N'


def test_rover_init_with_custom_parameters():
    from rover import Rover
    rover = Rover(3, 7, 'W')
    assert rover.x == 3
    assert rover.y == 7
    assert rover.direction == 'W'


def test_rover_init_custom_grid():
    from rover import Rover
    rover = Rover(grid_x=100, grid_y=150)
    assert rover.x == 0
    assert rover.y == 0
    assert rover.direction == 'N'
    assert rover.grid_x == 100
    assert rover.grid_y == 150


def test_rover_init_full_custom_grid():
    from rover import Rover
    rover = Rover(5, 9, 'E', 100, 150)
    assert rover.x == 5
    assert rover.y == 9
    assert rover.direction == 'E'
    assert rover.grid_x == 100
    assert rover.grid_y == 150

subject: Fix test naming spelling errors
message: Fix test naming spelling errors
lang: Python
license: mit
repos: authentik8/rover

commit: ac2b01e9177d04a6446b770639745010770cb317
old_file: nuage_neutron/plugins/nuage_ml2/nuage_subnet_ext_driver.py
new_file: nuage_neutron/plugins/nuage_ml2/nuage_subnet_ext_driver.py

old_contents:

# Copyright 2015 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log as logging

from neutron.plugins.ml2 import driver_api as api

LOG = logging.getLogger(__name__)


class NuageSubnetExtensionDriver(api.ExtensionDriver):
    _supported_extension_alias = 'nuage-subnet'

    def initialize(self):
        pass

    @property
    def extension_alias(self):
        return self._supported_extension_alias

    def process_create_subnet(self, plugin_context, data, result):
        result['net_partition'] = data['net_partition']
        result['nuagenet'] = data['nuagenet']

    def extend_subnet_dict(self, session, db_data, result):
        return result

new_contents:

# Copyright 2015 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_log import log as logging

from neutron.plugins.ml2 import driver_api as api

from nuage_neutron.plugins.common import nuagedb

LOG = logging.getLogger(__name__)


class NuageSubnetExtensionDriver(api.ExtensionDriver):
    _supported_extension_alias = 'nuage-subnet'

    def initialize(self):
        pass

    @property
    def extension_alias(self):
        return self._supported_extension_alias

    def process_create_subnet(self, plugin_context, data, result):
        result['net_partition'] = data['net_partition']
        result['nuagenet'] = data['nuagenet']

    def extend_subnet_dict(self, session, db_data, result):
        subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id'])
        if subnet_mapping:
            result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
        else:
            result['vsd_managed'] = False
        return result

subject: Add 'vsd_managed' to the GET subnet response for ML2
message:
    Add 'vsd_managed' to the GET subnet response for ML2

    This commit looks up the related nuage_subnet_l2dom_mapping in the
    database and uses the nuage_managed_subnet field to fill in
    'vsd_managed'. False by default.

    Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89
    Closes-Bug: OPENSTACK-1504
lang: Python
license: apache-2.0
repos: nuagenetworks/nuage-openstack-neutron,naveensan1/nuage-openstack-neutron,naveensan1/nuage-openstack-neutron,nuagenetworks/nuage-openstack-neutron
# Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): return result Add 'vsd_managed' to the GET subnet response for ML2 This commit looks up the related nuage_subnet_l2dom_mapping in the database and uses the nuage_managed_subnet field to fill in 'vsd_managed'. False by default. Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89 Closes-Bug: OPENSTACK-1504
# Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api from nuage_neutron.plugins.common import nuagedb LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id']) if subnet_mapping: result['vsd_managed'] = subnet_mapping['nuage_managed_subnet'] else: result['vsd_managed'] = False return result
<commit_before># Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): return result <commit_msg>Add 'vsd_managed' to the GET subnet response for ML2 This commit looks up the related nuage_subnet_l2dom_mapping in the database and uses the nuage_managed_subnet field to fill in 'vsd_managed'. False by default. Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89 Closes-Bug: OPENSTACK-1504<commit_after>
# Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api from nuage_neutron.plugins.common import nuagedb LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id']) if subnet_mapping: result['vsd_managed'] = subnet_mapping['nuage_managed_subnet'] else: result['vsd_managed'] = False return result
# Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): return result Add 'vsd_managed' to the GET subnet response for ML2 This commit looks up the related nuage_subnet_l2dom_mapping in the database and uses the nuage_managed_subnet field to fill in 'vsd_managed'. False by default. Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89 Closes-Bug: OPENSTACK-1504# Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api from nuage_neutron.plugins.common import nuagedb LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id']) if subnet_mapping: result['vsd_managed'] = subnet_mapping['nuage_managed_subnet'] else: result['vsd_managed'] = False return result
<commit_before># Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): return result <commit_msg>Add 'vsd_managed' to the GET subnet response for ML2 This commit looks up the related nuage_subnet_l2dom_mapping in the database and uses the nuage_managed_subnet field to fill in 'vsd_managed'. False by default. Change-Id: I68957fe3754dc9f1ccf2b6a2b09a762fccd17a89 Closes-Bug: OPENSTACK-1504<commit_after># Copyright 2015 Intel Corporation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import log as logging from neutron.plugins.ml2 import driver_api as api from nuage_neutron.plugins.common import nuagedb LOG = logging.getLogger(__name__) class NuageSubnetExtensionDriver(api.ExtensionDriver): _supported_extension_alias = 'nuage-subnet' def initialize(self): pass @property def extension_alias(self): return self._supported_extension_alias def process_create_subnet(self, plugin_context, data, result): result['net_partition'] = data['net_partition'] result['nuagenet'] = data['nuagenet'] def extend_subnet_dict(self, session, db_data, result): subnet_mapping = nuagedb.get_subnet_l2dom_by_id(session, result['id']) if subnet_mapping: result['vsd_managed'] = subnet_mapping['nuage_managed_subnet'] else: result['vsd_managed'] = False return result
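The row above caches a 'vsd_managed' flag on the subnet dict, defaulting to False when no Nuage mapping exists. A minimal, runnable sketch of that lookup in isolation, with a plain dict standing in for the real nuagedb.get_subnet_l2dom_by_id() query (the stub, subnet ids, and sample mapping are illustrative assumptions, not values from the source):

# Stand-in for nuagedb.get_subnet_l2dom_by_id(session, subnet_id); the real
# query returns None when the subnet has no Nuage L2 domain mapping.
_MAPPINGS = {'sub-1': {'nuage_managed_subnet': True}}

def get_subnet_l2dom_by_id(subnet_id):
    return _MAPPINGS.get(subnet_id)

def extend_subnet_dict(result):
    subnet_mapping = get_subnet_l2dom_by_id(result['id'])
    # Mirror the commit: fall back to False for unmapped subnets.
    if subnet_mapping:
        result['vsd_managed'] = subnet_mapping['nuage_managed_subnet']
    else:
        result['vsd_managed'] = False
    return result

print(extend_subnet_dict({'id': 'sub-1'}))  # {'id': 'sub-1', 'vsd_managed': True}
print(extend_subnet_dict({'id': 'sub-2'}))  # {'id': 'sub-2', 'vsd_managed': False}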
4ee689a4825a93cf6b0116b6b7343028c96b5cfb
bernard/discord_notifier.py
bernard/discord_notifier.py
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except: self.handleError(record)
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except requests.RequestException: self.handleError(record)
Fix bare 'except' in DiscordHandler
Fix bare 'except' in DiscordHandler
Python
mit
leviroth/bernard
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except: self.handleError(record) Fix bare 'except' in DiscordHandler
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except requests.RequestException: self.handleError(record)
<commit_before>"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except: self.handleError(record) <commit_msg>Fix bare 'except' in DiscordHandler<commit_after>
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except requests.RequestException: self.handleError(record)
"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except: self.handleError(record) Fix bare 'except' in DiscordHandler"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except requests.RequestException: self.handleError(record)
<commit_before>"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except: self.handleError(record) <commit_msg>Fix bare 'except' in DiscordHandler<commit_after>"""A logging handler that emits to a Discord webhook.""" import requests from logging import Handler class DiscordHandler(Handler): """A logging handler that emits to a Discord webhook.""" def __init__(self, webhook, *args, **kwargs): """Initialize the DiscordHandler class.""" super().__init__(*args, **kwargs) self.webhook = webhook def emit(self, record): """Emit record to the Discord webhook.""" json = {'content': self.format(record)} try: requests.post(self.webhook, json=json) except requests.RequestException: self.handleError(record)
1683cb41b5ffee4d48e8ec700382ad40e8370520
astrobin/tests/test_auth.py
astrobin/tests/test_auth.py
# Django from django.contrib.auth.models import User from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/')
# Django from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') def test_password_reset_view(self): response = self.client.get(reverse('password_reset')) self.assertEquals(response.status_code, 200)
Add test to check that password reset view loads fine
Add test to check that password reset view loads fine
Python
agpl-3.0
astrobin/astrobin,astrobin/astrobin,astrobin/astrobin,astrobin/astrobin
# Django from django.contrib.auth.models import User from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') Add test to check that password reset view loads fine
# Django from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') def test_password_reset_view(self): response = self.client.get(reverse('password_reset')) self.assertEquals(response.status_code, 200)
<commit_before># Django from django.contrib.auth.models import User from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') <commit_msg>Add test to check that password reset view loads fine<commit_after>
# Django from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') def test_password_reset_view(self): response = self.client.get(reverse('password_reset')) self.assertEquals(response.status_code, 200)
# Django from django.contrib.auth.models import User from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') Add test to check that password reset view loads fine# Django from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') def test_password_reset_view(self): response = self.client.get(reverse('password_reset')) self.assertEquals(response.status_code, 200)
<commit_before># Django from django.contrib.auth.models import User from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') <commit_msg>Add test to check that password reset view loads fine<commit_after># Django from django.contrib.auth.models import User from django.core.urlresolvers import reverse from django.test import TestCase class LoginTest(TestCase): def setUp(self): self.user = User.objects.create_user( 'test', 'test@test.com', 'password') def tearDown(self): self.user.delete() def test_login_view(self): response = self.client.post( '/accounts/login/', { 'username': 'test', 'password': 'password', }) self.assertRedirects(response, '/') def test_password_reset_view(self): response = self.client.get(reverse('password_reset')) self.assertEquals(response.status_code, 200)
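The new test resolves the view by URL name instead of hard-coding a path, so it survives URLconf changes. A short sketch of the same call as it would appear inside a configured Django TestCase (the resolved path is illustrative, and django.core.urlresolvers moved to django.urls in Django 2.0):

from django.core.urlresolvers import reverse  # django.urls.reverse on Django >= 2.0

url = reverse('password_reset')  # e.g. '/accounts/password/reset/', per the URLconf
response = self.client.get(url)  # self is the TestCase, as in the row above
self.assertEqual(response.status_code, 200)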
99fbf97643bdfd42b1dc8890a7cfeccc61ae973f
moderator/moderate/auth.py
moderator/moderate/auth.py
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, email, **kwargs): try: data = is_vouched(email) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(email, **kwargs) return None
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, claims, **kwargs): try: data = is_vouched(claims.get('email')) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(claims, **kwargs) return None
Fix claims handling on create_user
Fix claims handling on create_user
Python
agpl-3.0
akatsoulas/mozmoderator,mozilla/mozmoderator,johngian/mozmoderator,mozilla/mozmoderator,akatsoulas/mozmoderator,akatsoulas/mozmoderator,johngian/mozmoderator,johngian/mozmoderator,mozilla/mozmoderator,johngian/mozmoderator
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, email, **kwargs): try: data = is_vouched(email) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(email, **kwargs) return None Fix claims handling on create_user
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, claims, **kwargs): try: data = is_vouched(claims.get('email')) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(claims, **kwargs) return None
<commit_before>from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, email, **kwargs): try: data = is_vouched(email) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(email, **kwargs) return None <commit_msg>Fix claims handling on create_user<commit_after>
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, claims, **kwargs): try: data = is_vouched(claims.get('email')) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(claims, **kwargs) return None
from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, email, **kwargs): try: data = is_vouched(email) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(email, **kwargs) return None Fix claims handling on create_userfrom mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, claims, **kwargs): try: data = is_vouched(claims.get('email')) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(claims, **kwargs) return None
<commit_before>from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, email, **kwargs): try: data = is_vouched(email) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(email, **kwargs) return None <commit_msg>Fix claims handling on create_user<commit_after>from mozilla_django_oidc.auth import OIDCAuthenticationBackend from moderator.moderate.mozillians import is_vouched, BadStatusCodeError class ModeratorAuthBackend(OIDCAuthenticationBackend): def create_user(self, claims, **kwargs): try: data = is_vouched(claims.get('email')) except BadStatusCodeError: data = None if data and data['is_vouched']: return super(ModeratorAuthBackend, self).create_user(claims, **kwargs) return None
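mozilla-django-oidc hands create_user() the verified claims dict rather than a bare email, which is what the fix accounts for by unpacking claims.get('email'). A sketch of the shapes involved, assuming a configured Django + OIDC environment (the claim values are invented):

claims = {
    'email': 'user@example.com',   # illustrative verified OIDC claim
    'name': 'Example Mozillian',
}

backend = ModeratorAuthBackend()
# A User is created only when the Mozillians API reports the email as
# vouched; an unvouched profile or a BadStatusCodeError yields None.
user = backend.create_user(claims)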
c72d9060142fe1de1e2201fc355f2ee95f5354c7
src/waldur_mastermind/invoices/migrations/0023_invoice_current_cost.py
src/waldur_mastermind/invoices/migrations/0023_invoice_current_cost.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
Fix database migration for invoices application.
Fix database migration for invoices application.
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/nodeconductor-assembly-waldur
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ] Fix database migration for invoices application.
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ] <commit_msg>Fix database migration for invoices application.<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ] Fix database migration for invoices application.# -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): Invoice = apps.get_model('invoices', 'Invoice') for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ] <commit_msg>Fix database migration for invoices application.<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.7 on 2018-06-19 08:47 from __future__ import unicode_literals from django.db import migrations, models def migrate_data(apps, schema_editor): from waldur_mastermind.invoices.models import Invoice for invoice in Invoice.objects.all(): invoice.update_current_cost() class Migration(migrations.Migration): dependencies = [ ('invoices', '0022_remove_payment_details'), ] operations = [ migrations.AddField( model_name='invoice', name='current_cost', field=models.DecimalField(decimal_places=2, default=0, editable=False, help_text='Cached value for current cost.', max_digits=10), ), migrations.RunPython(migrate_data, reverse_code=migrations.RunPython.noop), ]
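The likely reason for the fix: apps.get_model() returns a historical model that reconstructs database fields but not custom Python methods, so update_current_cost() would not exist on it; importing the live model restores the method, at the cost of coupling the migration to the current model code. A compact contrast under that assumption (names as in the row above):

def migrate_data(apps, schema_editor):
    # Historical model: database columns only, no custom methods, so
    # hasattr(HistoricalInvoice, 'update_current_cost') is False.
    HistoricalInvoice = apps.get_model('invoices', 'Invoice')

    # Live model: the full class, custom methods included.
    from waldur_mastermind.invoices.models import Invoice
    for invoice in Invoice.objects.all():
        invoice.update_current_cost()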
d4154f7cde83f3f48ff70bb7abe110e03679ff9d
aiohttp_json_api/helpers.py
aiohttp_json_api/helpers.py
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping)
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) def is_instance_or_subclass(val, class_): """Return True if ``val`` is either a subclass or instance of ``class_``.""" try: return issubclass(val, class_) except TypeError: return isinstance(val, class_)
Add helper to check instance and subclass
Add helper to check instance and subclass
Python
mit
vovanbo/aiohttp_json_api
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) Add helper to check instance and subclass
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) def is_instance_or_subclass(val, class_): """Return True if ``val`` is either a subclass or instance of ``class_``.""" try: return issubclass(val, class_) except TypeError: return isinstance(val, class_)
<commit_before>""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) <commit_msg>Add helper to check instance and subclass<commit_after>
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) def is_instance_or_subclass(val, class_): """Return True if ``val`` is either a subclass or instance of ``class_``.""" try: return issubclass(val, class_) except TypeError: return isinstance(val, class_)
""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) Add helper to check instance and subclass""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) def is_instance_or_subclass(val, class_): """Return True if ``val`` is either a subclass or instance of ``class_``.""" try: return issubclass(val, class_) except TypeError: return isinstance(val, class_)
<commit_before>""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) <commit_msg>Add helper to check instance and subclass<commit_after>""" Helpers ======= """ import inspect from collections import Mapping, Iterable def is_generator(obj): """Return True if ``obj`` is a generator """ return inspect.isgeneratorfunction(obj) or inspect.isgenerator(obj) def is_iterable_but_not_string(obj): """Return True if ``obj`` is an iterable object that isn't a string.""" return ( (isinstance(obj, Iterable) and not hasattr(obj, "strip")) or is_generator(obj) ) def is_indexable_but_not_string(obj): """Return True if ``obj`` is indexable but isn't a string.""" return not hasattr(obj, "strip") and hasattr(obj, "__getitem__") def is_collection(obj): """ Return True if ``obj`` is a collection type, e.g list, tuple, queryset. """ return is_iterable_but_not_string(obj) and not isinstance(obj, Mapping) def is_instance_or_subclass(val, class_): """Return True if ``val`` is either a subclass or instance of ``class_``.""" try: return issubclass(val, class_) except TypeError: return isinstance(val, class_)
677d2d4f422f9b05746fa80d63492de4ae9aced4
tests/test_examples.py
tests/test_examples.py
import pytest import examples.basic_usage import examples.basic_usage_manual import examples.dataset import examples.variant_ts_difficulties import examples.variants def test_dataset(unihan_options): examples.dataset.run() def test_variants(unihan_options): examples.variants.run(unihan_options=unihan_options) def test_ts_difficulties(unihan_options): examples.variant_ts_difficulties.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage_manual.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
import importlib import importlib.util import sys import types import pytest def load_script(example: str) -> types.ModuleType: file_path = f"examples/{example}.py" module_name = "run" spec = importlib.util.spec_from_file_location(module_name, file_path) assert spec is not None module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module assert spec.loader is not None spec.loader.exec_module(module) return module def test_dataset(unihan_options): example = load_script("dataset") example.run() def test_variants(unihan_options): example = load_script("variants") example.run() def test_ts_difficulties(unihan_options): example = load_script("variant_ts_difficulties") example.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage_manual") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
Rework for handling of examples/
refactor(tests): Rework for handling of examples/
Python
mit
cihai/cihai,cihai/cihai
import pytest import examples.basic_usage import examples.basic_usage_manual import examples.dataset import examples.variant_ts_difficulties import examples.variants def test_dataset(unihan_options): examples.dataset.run() def test_variants(unihan_options): examples.variants.run(unihan_options=unihan_options) def test_ts_difficulties(unihan_options): examples.variant_ts_difficulties.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage_manual.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out refactor(tests): Rework for handling of examples/
import importlib import importlib.util import sys import types import pytest def load_script(example: str) -> types.ModuleType: file_path = f"examples/{example}.py" module_name = "run" spec = importlib.util.spec_from_file_location(module_name, file_path) assert spec is not None module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module assert spec.loader is not None spec.loader.exec_module(module) return module def test_dataset(unihan_options): example = load_script("dataset") example.run() def test_variants(unihan_options): example = load_script("variants") example.run() def test_ts_difficulties(unihan_options): example = load_script("variant_ts_difficulties") example.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage_manual") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
<commit_before>import pytest import examples.basic_usage import examples.basic_usage_manual import examples.dataset import examples.variant_ts_difficulties import examples.variants def test_dataset(unihan_options): examples.dataset.run() def test_variants(unihan_options): examples.variants.run(unihan_options=unihan_options) def test_ts_difficulties(unihan_options): examples.variant_ts_difficulties.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage_manual.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out <commit_msg>refactor(tests): Rework for handling of examples/<commit_after>
import importlib import importlib.util import sys import types import pytest def load_script(example: str) -> types.ModuleType: file_path = f"examples/{example}.py" module_name = "run" spec = importlib.util.spec_from_file_location(module_name, file_path) assert spec is not None module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module assert spec.loader is not None spec.loader.exec_module(module) return module def test_dataset(unihan_options): example = load_script("dataset") example.run() def test_variants(unihan_options): example = load_script("variants") example.run() def test_ts_difficulties(unihan_options): example = load_script("variant_ts_difficulties") example.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage_manual") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
import pytest import examples.basic_usage import examples.basic_usage_manual import examples.dataset import examples.variant_ts_difficulties import examples.variants def test_dataset(unihan_options): examples.dataset.run() def test_variants(unihan_options): examples.variants.run(unihan_options=unihan_options) def test_ts_difficulties(unihan_options): examples.variant_ts_difficulties.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage_manual.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out refactor(tests): Rework for handling of examples/import importlib import importlib.util import sys import types import pytest def load_script(example: str) -> types.ModuleType: file_path = f"examples/{example}.py" module_name = "run" spec = importlib.util.spec_from_file_location(module_name, file_path) assert spec is not None module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module assert spec.loader is not None spec.loader.exec_module(module) return module def test_dataset(unihan_options): example = load_script("dataset") example.run() def test_variants(unihan_options): example = load_script("variants") example.run() def test_ts_difficulties(unihan_options): example = load_script("variant_ts_difficulties") example.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage_manual") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
<commit_before>import pytest import examples.basic_usage import examples.basic_usage_manual import examples.dataset import examples.variant_ts_difficulties import examples.variants def test_dataset(unihan_options): examples.dataset.run() def test_variants(unihan_options): examples.variants.run(unihan_options=unihan_options) def test_ts_difficulties(unihan_options): examples.variant_ts_difficulties.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): examples.basic_usage_manual.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out <commit_msg>refactor(tests): Rework for handling of examples/<commit_after>import importlib import importlib.util import sys import types import pytest def load_script(example: str) -> types.ModuleType: file_path = f"examples/{example}.py" module_name = "run" spec = importlib.util.spec_from_file_location(module_name, file_path) assert spec is not None module = importlib.util.module_from_spec(spec) sys.modules[module_name] = module assert spec.loader is not None spec.loader.exec_module(module) return module def test_dataset(unihan_options): example = load_script("dataset") example.run() def test_variants(unihan_options): example = load_script("variants") example.run() def test_ts_difficulties(unihan_options): example = load_script("variant_ts_difficulties") example.run(unihan_options=unihan_options) def test_basic_usage(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out def test_basic_usage_manual(unihan_options, capsys: pytest.CaptureFixture[str]): example = load_script("basic_usage_manual") example.run(unihan_options=unihan_options) captured = capsys.readouterr() assert "lookup for 好: good" in captured.out assert 'matches for "good": 好' in captured.out
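The refactor swaps package imports for on-demand loading via importlib, so examples/ no longer has to be an importable package. A sketch of the loader in use, assuming pytest runs from the repository root so the relative examples/ path resolves:

module = load_script('dataset')      # executes examples/dataset.py under the name 'run'
module.run()                         # call the example's entry point

import sys
assert sys.modules['run'] is module  # note: each load_script() call rebinds 'run'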
983df9ceaebb42ca31b131f437362193070eb1db
paasta_tools/clusterman.py
paasta_tools/clusterman.py
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
Fix regression in manpages build
Fix regression in manpages build
Python
apache-2.0
Yelp/paasta,Yelp/paasta
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml Fix regression in manpages build
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
<commit_before>import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml <commit_msg>Fix regression in manpages build<commit_after>
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml Fix regression in manpages buildimport staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
<commit_before>import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml <commit_msg>Fix regression in manpages build<commit_after>import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
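Callers evidently treat a (None, None) return as "clusterman unavailable", so catching FileNotFoundError alongside ImportError keeps hosts without /nail/srv/configs/clusterman_metrics.yaml, such as the manpages build environment, on that fallback path. A sketch of the inferred calling convention:

clusterman_metrics, clusterman_yaml = get_clusterman_metrics()

if clusterman_metrics is None:
    # Library not installed or config file absent: skip metrics work.
    print('clusterman metrics disabled')
else:
    print('reading clusterman config from', clusterman_yaml)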
bf5ec5a459dc9dbe38a6806b513616aa769134a2
amqpy/tests/test_version.py
amqpy/tests/test_version.py
import re def get_field(doc: str, name: str): match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE) if match: return match.group(1).strip() class TestVersion: def test_version_is_consistent(self): from .. import VERSION with open('README.rst') as f: readme = f.read() version = get_field(readme, 'version') version = version.split('.') version = tuple([int(i) for i in version]) assert VERSION == version
import re def get_field(doc: str, name: str): match = re.search(':{}: (.*)$'.format(name), doc, re.IGNORECASE | re.MULTILINE) if match: return match.group(1).strip() class TestVersion: def test_version_is_consistent(self): from .. import VERSION with open('README.rst') as f: readme = f.read() version = get_field(readme, 'version') version = tuple(map(int, version.split('.'))) assert VERSION == version
Use `map` to test version
Use `map` to test version
Python
mit
veegee/amqpy,gst/amqpy
5fd25b11eac4725ca7da879082d9334481fa59b8
python/default_crab_config.py
python/default_crab_config.py
__author__ = 'sbrochet'


def create_config(is_mc):
    """
    Create a default CRAB configuration suitable to run the framework

    :return:
    """

    from CRABClient.UserUtilities import config, getUsernameFromSiteDB
    config = config()

    config.General.workArea = 'tasks'
    config.General.transferOutputs = True
    config.General.transferLogs = True

    config.JobType.pluginName = 'Analysis'
    config.JobType.disableAutomaticOutputCollection = True
    config.JobType.outputFiles = []
    config.JobType.allowUndistributedCMSSW = True

    config.Data.inputDBS = 'global'
    if is_mc:
        config.Data.splitting = 'FileBased'
    else:
        config.Data.splitting = 'LumiBased'
    config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
    config.Data.publication = False

    config.Site.storageSite = 'T2_BE_UCL'

    return config
__author__ = 'sbrochet'


def create_config(is_mc):
    """
    Create a default CRAB configuration suitable to run the framework

    :return:
    """

    from CRABClient.UserUtilities import config, getUsernameFromSiteDB
    config = config()

    config.General.workArea = 'tasks'
    config.General.transferOutputs = True
    config.General.transferLogs = True

    config.JobType.pluginName = 'Analysis'
    config.JobType.disableAutomaticOutputCollection = True
    config.JobType.outputFiles = []
    config.JobType.allowUndistributedCMSSW = True
    config.JobType.sendExternalFolder = True  # To send electron MVA ids with jobs

    config.Data.inputDBS = 'global'
    if is_mc:
        config.Data.splitting = 'FileBased'
    else:
        config.Data.splitting = 'LumiBased'
    config.Data.outLFNDirBase = '/store/user/%s/' % (getUsernameFromSiteDB())
    config.Data.publication = False

    config.Site.storageSite = 'T2_BE_UCL'

    return config
Send `external` folder with crab jobs
Send `external` folder with crab jobs
Python
mit
cp3-llbb/GridIn,cp3-llbb/GridIn
5a889dee78335d3c7d758c1df16d774160049b12
djangoprojects/django_rest_framework/tutorial/snippets/views.py
djangoprojects/django_rest_framework/tutorial/snippets/views.py
from snippets.models import Snippet
from snippets.serializers import SnippetSerializer
from rest_framework import generics
from rest_framework import permissions
from django.contrib.auth.models import User
from snippets.serializers import UserSerializer
from snippets.permissions import IsOwnerOrReadOnly


class UserList(generics.ListAPIView):
    queryset = User.objects.all()
    serializer_class = UserSerializer


class UserDetail(generics.RetrieveAPIView):
    queryset = User.objects.all()
    serializer_class = UserSerializer


class SnippetList(generics.ListCreateAPIView):
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user)


class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly)
from snippets.models import Snippet
from snippets.serializers import SnippetSerializer
from rest_framework import generics
from rest_framework import permissions
from django.contrib.auth.models import User
from snippets.serializers import UserSerializer
from snippets.permissions import IsOwnerOrReadOnly
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse


@api_view(("GET", ))
def api_root(request, format=None):
    return Response({
        "users": reverse("user-list", request=request, format=format),
        "snippets": reverse("snippet-list", request=request, format=format)
    })


class UserList(generics.ListAPIView):
    queryset = User.objects.all()
    serializer_class = UserSerializer


class UserDetail(generics.RetrieveAPIView):
    queryset = User.objects.all()
    serializer_class = UserSerializer


class SnippetList(generics.ListCreateAPIView):
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,)

    def perform_create(self, serializer):
        serializer.save(owner=self.request.user)


class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
    queryset = Snippet.objects.all()
    serializer_class = SnippetSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly)
Add an api view for the API root
Add an api view for the API root. Not plugged yet.
Python
unlicense
bertrandvidal/stuff,bertrandvidal/stuff,bertrandvidal/stuff,bertrandvidal/stuff
35d4bc796f27bdf5aea7c3dea83c581260a4669a
tests/testapp/testapp/blog/__init__.py
tests/testapp/testapp/blog/__init__.py
def includeme(config):
    config.add_route('test.new.routes', '/test/new/routes/{param:[a-z]+}')
    config.add_route('blog.page', '/page/{page_slug}')
def includeme(config):
    config.add_route('test.new.routes', '/test/new/routes/{param:[a-z]{1,}}')
    config.add_route('blog.page', '/page/{page_slug}')
Use patterns with explicit length
Use patterns with explicit length
Python
mit
avanov/Rhetoric,avanov/Rhetoric
3f5149841163ab3e79fbd69990e53791281ec4a6
opps/articles/templatetags/article_tags.py
opps/articles/templatetags/article_tags.py
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone

from opps.articles.models import ArticleBox

register = template.Library()


@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
    if channel_slug:
        slug = slug + '-' + channel_slug

    try:
        box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
                                     date_available__lte=timezone.now(),
                                     published=True)
    except ArticleBox.DoesNotExist:
        box = None

    t = template.loader.get_template('articles/articlebox_detail.html')
    if template_name:
        t = template.loader.get_template(template_name)

    return t.render(template.Context({'articlebox': box, 'slug': slug}))


@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
    boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
                                      channel__slug=channel_slug)

    t = template.loader.get_template('articles/articlebox_list.html')
    if template_name:
        t = template.loader.get_template(template_name)

    return t.render(template.Context({'articleboxes': boxes}))
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone

from opps.articles.models import ArticleBox

register = template.Library()


@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
    if channel_slug:
        slug = slug + '-' + channel_slug

    try:
        box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
                                     date_available__lte=timezone.now(),
                                     published=True)
    except ArticleBox.DoesNotExist:
        box = None

    t = template.loader.get_template('articles/articlebox_detail.html')
    if template_name:
        t = template.loader.get_template(template_name)

    return t.render(template.Context({'articlebox': box, 'slug': slug}))


@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
    boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
                                      date_available__lte=timezone.now(),
                                      published=True,
                                      channel__slug=channel_slug)

    t = template.loader.get_template('articles/articlebox_list.html')
    if template_name:
        t = template.loader.get_template(template_name)

    return t.render(template.Context({'articleboxes': boxes}))
Add published-state validation to the `get_all_articlebox` template tag
Add published-state validation to the `get_all_articlebox` template tag
Python
mit
jeanmask/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,YACOWS/opps,opps/opps,opps/opps,opps/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,YACOWS/opps,williamroot/opps,williamroot/opps
54cb7685550c1c5238bb2f519306e4b5db5fc9f0
webapp-django/challenges/views.py
webapp-django/challenges/views.py
from django.core.files.storage import FileSystemStorage
from django.shortcuts import render, redirect
from django.http import HttpResponse

from .models import Challenge
# from .forms import DocumentForm


def download(req):
    response = HttpResponse(content_type='application/zip')
    response['Content-Disposition'] = 'attachment; filename=myfile.zip'
    return response


def index(request):
    challenges = Challenge.objects.all()
    return render(request, 'challenges/index.html', {'challenges': challenges})

'''
path=settings.MEDIA_ROOT
file_list =os.listdir(path)
return render(request,'challenges/index.html', {'files': file_list})
'''


def upload(request):
    if request.method == 'POST' and request.FILES['myfile']:
        myfile = request.FILES['myfile']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        return render(request, 'challenges/upload.html', {
            'uploaded_file_url': uploaded_file_url
        })
    return render(request, 'challenges/upload.html')


def upload2(request):
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect('/jeopardy')
    else:
        form = DocumentForm()
    return render(request, 'challenges/upload2.html', {
        'form': form
    })


def textBased(request):
    challenges = Challenge.objects.all()
    return render(request, 'challenges/textBased.html', {'challenges': challenges})
from django.http import HttpResponse
from django.shortcuts import render

from .models import Challenge


def download(req):
    response = HttpResponse(content_type='application/zip')
    response['Content-Disposition'] = 'attachment; filename=myfile.zip'
    return response


def index(request):
    challenges = Challenge.objects.all()
    return render(request, 'challenges/index.html', {'challenges': challenges})

'''
path=settings.MEDIA_ROOT
file_list =os.listdir(path)
return render(request,'challenges/index.html', {'files': file_list})
'''

'''
def upload(request):
    if request.method == 'POST' and request.FILES['myfile']:
        myfile = request.FILES['myfile']
        fs = FileSystemStorage()
        filename = fs.save(myfile.name, myfile)
        uploaded_file_url = fs.url(filename)
        return render(request, 'challenges/upload.html', {
            'uploaded_file_url': uploaded_file_url
        })
    return render(request, 'challenges/upload.html')


def upload2(request):
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES)
        if form.is_valid():
            form.save()
            return redirect('/jeopardy')
    else:
        form = DocumentForm()
    return render(request, 'challenges/upload2.html', {
        'form': form
    })
'''


def textBased(request):
    challenges = Challenge.objects.all()
    return render(request, 'challenges/textBased.html', {'challenges': challenges})
Comment out some useless code in challenges
Comment out some useless code in challenges
Python
mit
super1337/Super1337-CTF,super1337/Super1337-CTF,super1337/Super1337-CTF
57e04bdafd571c0b8ce2c1706fb170629dea2840
salt/grains/minion_process.py
salt/grains/minion_process.py
# -*- coding: utf-8 -*-
'''
Set grains describing the minion process.
'''

from __future__ import absolute_import, print_function, unicode_literals

import os

# Import salt libs
import salt.utils.platform

try:
    import pwd
except ImportError:
    import getpass
    pwd = None

try:
    import grp
except ImportError:
    grp = None


def _uid():
    '''
    Grain for the minion User ID
    '''
    if salt.utils.platform.is_windows():
        return None
    return os.getuid()


def _username():
    '''
    Grain for the minion username
    '''
    if pwd:
        username = pwd.getpwuid(os.getuid()).pw_name
    else:
        username = getpass.getuser()
    return username


def _gid():
    '''
    Grain for the minion Group ID
    '''
    if salt.utils.platform.is_windows():
        return None
    return os.getgid()


def _groupname():
    '''
    Grain for the minion groupname
    '''
    if grp:
        groupname = grp.getgrgid(os.getgid()).gr_name
    else:
        groupname = ''
    return groupname


def _pid():
    return os.getpid()


def grains():
    ret = {
        'username': _username(),
        'groupname': _groupname(),
        'pid': _pid(),
    }

    if not salt.utils.platform.is_windows():
        ret['gid'] = _gid()
        ret['uid'] = _uid()

    return ret
# -*- coding: utf-8 -*-
'''
Set grains describing the minion process.
'''

from __future__ import absolute_import, print_function, unicode_literals

import os

# Import salt libs
import salt.utils.platform

try:
    import pwd
except ImportError:
    import getpass
    pwd = None

try:
    import grp
except ImportError:
    grp = None


def _uid():
    '''
    Grain for the minion User ID
    '''
    if salt.utils.platform.is_windows():
        return None
    return os.getuid()


def _username():
    '''
    Grain for the minion username
    '''
    if pwd:
        username = pwd.getpwuid(os.getuid()).pw_name
    else:
        username = getpass.getuser()
    return username


def _gid():
    '''
    Grain for the minion Group ID
    '''
    if salt.utils.platform.is_windows():
        return None
    return os.getgid()


def _groupname():
    '''
    Grain for the minion groupname
    '''
    if grp:
        try:
            groupname = grp.getgrgid(os.getgid()).gr_name
        except KeyError:
            groupname = ''
    else:
        groupname = ''
    return groupname


def _pid():
    return os.getpid()


def grains():
    ret = {
        'username': _username(),
        'groupname': _groupname(),
        'pid': _pid(),
    }

    if not salt.utils.platform.is_windows():
        ret['gid'] = _gid()
        ret['uid'] = _uid()

    return ret
Fix gid not found bug
Fix gid not found bug
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
e5b1eeab4486d2182396a7f8e64d0a05207a3f5f
bom_data_parser/__init__.py
bom_data_parser/__init__.py
from climate_data_online import read_climate_data_online_csv
from acorn_sat import read_acorn_sat_csv
from hrs import read_hrs_csv
from observations_json import read_obs_json
from bom_data_parser.acorn_sat import read_acorn_sat_csv
from bom_data_parser.climate_data_online import read_climate_data_online_csv
from bom_data_parser.hrs import read_hrs_csv
from bom_data_parser.observations_json import read_obs_json
Fix up imports in package.
Fix up imports in package.
Python
bsd-3-clause
amacd31/bom_data_parser,amacd31/bom_data_parser
dfdac5764236ce9301e7997443b6de4a7a4b4473
scripts/convert_gml_to_csv.py
scripts/convert_gml_to_csv.py
import sys
import os
sys.path.append(os.path.abspath(os.path.curdir))
from converter import gml_to_node_edge_list

if __name__ == '__main__':
    in_file = sys.argv[1]
    res = gml_to_node_edge_list(in_file, routing=True)
import sys
import os
sys.path.append(os.path.abspath(os.path.curdir))
from converter import gml_to_node_edge_list

if __name__ == '__main__':
    in_file = sys.argv[1]
    outfile = sys.argv[2] if len(sys.argv) > 2 else None
    res = gml_to_node_edge_list(in_file, outfile=outfile, routing=True)
Add outfile option to conversion script
Add outfile option to conversion script
Python
mit
gaberosser/geo-network
1fef8dbb26aec9b0f3f174e09789461714e55ac5
snmpy/disk_utilization.py
snmpy/disk_utilization.py
import os, time, subprocess
import logging as log

class disk_utilization:
    def __init__(self, conf):
        os.environ['LC_TIME'] = 'POSIX'
        self.devs = ['dev%s-%s' % tuple(line.split()[0:2]) for line in open('/proc/diskstats')]

    def len(self):
        return len(self.devs)

    def key(self, idx):
        return 'string', self.devs[idx - 1]

    def val(self, idx):
        ts = time.localtime(time.time() - 60 * 20)

        results = {}
        command = ['/usr/bin/sar', '-d', '-f', '/var/log/sysstat/sa%02d' % ts.tm_mday, '-s', time.strftime('%H:%M:00', ts)]
        log.debug('running command: %s', ' '.join(command))
        for line in subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0].split('\n'):
            line = line.split()
            log.debug('line: %s', line)
            if len(line) and line[0] != 'Average:' and line[1].startswith('dev'):
                results[line[1]] = int(float(line[9]))

        log.debug('results: %s', results)
        return 'integer', results.get(self.devs[idx - 1], 0)
import os
import time
import snmpy
import subprocess
import logging as log

class disk_utilization(snmpy.plugin):
    def __init__(self, conf, script=False):
        snmpy.plugin.__init__(self, conf, script)

    def key(self, idx):
        return 'string', self.data[idx - 1]

    def val(self, idx):
        ts = time.localtime(time.time() - 60 * 20)

        results = {}
        command = ['/usr/bin/sar', '-d', '-f', '/var/log/sysstat/sa%02d' % ts.tm_mday, '-s', time.strftime('%H:%M:00', ts)]
        log.debug('running command: %s', ' '.join(command))
        for line in subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0].split('\n'):
            line = line.split()
            log.debug('line: %s', line)
            if len(line) and line[0] != 'Average:' and line[1].startswith('dev'):
                results[line[1]] = int(float(line[9]))

        log.debug('results: %s', results)
        return 'integer', results.get(self.data[idx - 1], 0)

    def worker(self):
        os.environ['LC_TIME'] = 'POSIX'
        self.data = ['dev%s-%s' % tuple(line.split()[0:2]) for line in open('/proc/diskstats')]
Convert to use the base class and update for new plugin path.
Convert to use the base class and update for new plugin path.
Python
mit
mk23/snmpy,mk23/snmpy
2f34d442157f86af4fd75c48ea2cf568fbef34f6
migrations/versions/223041bb858b_message_contact_association.py
migrations/versions/223041bb858b_message_contact_association.py
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a # script to process contacts from messages, so it's very expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
Rearrange imports in previous migration.
Rearrange imports in previous migration. According to mg bad things can happen if you try to do stuff outside of a migration's upgrade() function.
Python
agpl-3.0
wakermahmud/sync-engine,nylas/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,jobscore/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,rmasters/inbox,PriviPK/privipk-sync-engine,gale320/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,EthanBlackburn/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,rmasters/inbox,nylas/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,rmasters/inbox,Eagles2F/sync-engine,ErinCall/sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,gale320/sync-engine,gale320/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a # script to process contacts from messages, so it's very expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation') Rearrange imports in previous migration. According to mg bad things can happen if you try to do stuff outside of a migration's upgrade() function.
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
<commit_before>"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a # script to process contacts from messages, so it's very expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation') <commit_msg>Rearrange imports in previous migration. According to mg bad things can happen if you try to do stuff outside of a migration's upgrade() function.<commit_after>
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a # script to process contacts from messages, so it's very expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation') Rearrange imports in previous migration. According to mg bad things can happen if you try to do stuff outside of a migration's upgrade() function."""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
<commit_before>"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' # Yes, this is a terrible hack. But tools/rerank_contacts.py already contains a # script to process contacts from messages, so it's very expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation') <commit_msg>Rearrange imports in previous migration. According to mg bad things can happen if you try to do stuff outside of a migration's upgrade() function.<commit_after>"""message contact association Revision ID: 223041bb858b Revises: 2c9f3a06de09 Create Date: 2014-04-28 23:52:05.449401 """ # revision identifiers, used by Alembic. revision = '223041bb858b' down_revision = '2c9f3a06de09' from alembic import op import sqlalchemy as sa def upgrade(): op.create_table( 'messagecontactassociation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('contact_id', sa.Integer(), nullable=False), sa.Column('message_id', sa.Integer(), nullable=False), sa.Column('field', sa.Enum('from_addr', 'to_addr', 'cc_addr', 'bcc_addr'), nullable=True), sa.ForeignKeyConstraint(['contact_id'], ['contact.id'], ), sa.ForeignKeyConstraint(['message_id'], ['message.id'], ), sa.PrimaryKeyConstraint('id', 'contact_id', 'message_id') ) # Yes, this is a terrible hack. But tools/rerank_contacts.py already # contains a script to process contacts from messages, so it's very # expedient. import sys sys.path.append('./tools') from rerank_contacts import rerank_contacts rerank_contacts() def downgrade(): op.drop_table('messagecontactassociation')
47eac4ef8acca10023f2f43dd3fea0e0abbc1202
apps/organizations/admin.py
apps/organizations/admin.py
from apps.organizations.models import Organization, OrganizationAddress
from django.contrib import admin


class OrganizationAddressAdmin(admin.StackedInline):
    model = OrganizationAddress
    extra = 1


class OrganizationAdmin(admin.ModelAdmin):
    inlines = (OrganizationAddressAdmin,)

admin.site.register(Organization, OrganizationAdmin)
from django.contrib import admin

from apps.organizations.models import (
    Organization, OrganizationAddress, OrganizationMember
)


class OrganizationAddressAdmin(admin.StackedInline):
    model = OrganizationAddress
    extra = 1


class OrganizationAdmin(admin.ModelAdmin):
    inlines = (OrganizationAddressAdmin,)

admin.site.register(Organization, OrganizationAdmin)
admin.site.register(OrganizationMember)
Add Admin page for OrganizationMember.
Add Admin page for OrganizationMember.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
48e04ae85c563ab6af03773535ebeed748d33572
flynn/__init__.py
flynn/__init__.py
# coding: utf-8

import flynn.decoder
import flynn.encoder

def dump(obj, fp):
    return flynn.encoder.encode(fp, obj)

def dumps(obj):
    return flynn.encoder.encode_str(obj)

def load(s):
    return flynn.decoder.decode(s)

def loads(s):
    return flynn.decoder.decode(s)
# coding: utf-8

import flynn.decoder
import flynn.encoder

def dump(obj, fp):
    return flynn.encoder.encode(fp, obj)

def dumps(obj):
    return flynn.encoder.encode_str(obj)

def dumph(obj):
    return "".join(hex(n)[2:].rjust(2, "0") for n in dumps(obj))

def load(s):
    return flynn.decoder.decode(s)

def loads(s):
    return flynn.decoder.decode(s)

def loadh(s):
    return flynn.decoder.decode(s)
Implement dumph to generate input for cbor.me
Implement dumph to generate input for cbor.me
Python
mit
fritz0705/flynn
e1e7152ae23ce5f4e8219254581e3a3c13960149
linter.py
linter.py
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Copyright (c) 2014 CorvisaCloud, LLC
#
# License: MIT
#

"""This module exports the Luacheck plugin class."""

from SublimeLinter.lint import Linter


class Luacheck(Linter):
    """Provides an interface to luacheck."""

    syntax = 'lua'
    tempfile_suffix = 'lua'
    defaults = {
        '--ignore:,': ['channel'],
        '--only:,': [],
        '--limit=': None,
        '--globals:,': [],
    }
    comment_re = r'\s*--'
    inline_settings = 'limit'
    inline_overrides = ('ignore', 'only', 'globals')
    cmd = 'luacheck @ *'
    regex = r'^(?P<filename>.+):(?P<line>\d+):(?P<col>\d+): (?P<message>.*)$'

    def build_args(self, settings):
        """Return args, transforming --ignore, --only, and --globals args into a format luacheck understands."""
        args = super().build_args(settings)

        for arg in ('--ignore', '--only', '--globals'):
            try:
                index = args.index(arg)
                values = args[index + 1].split(',')
                args[index + 1:index + 2] = values
            except ValueError:
                pass

        return args
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Copyright (c) 2014 CorvisaCloud, LLC
#
# License: MIT
#

"""This module exports the Luacheck plugin class."""

from SublimeLinter.lint import Linter


class Luacheck(Linter):
    """Provides an interface to luacheck."""

    syntax = 'lua'
    tempfile_suffix = 'lua'
    defaults = {
        '--ignore:,': ['channel'],
        '--only:,': [],
        '--limit=': None,
        '--globals:,': [],
    }
    comment_re = r'\s*--'
    inline_settings = 'limit'
    inline_overrides = ('ignore', 'only', 'globals')
    cmd = 'luacheck @ *'
    regex = r'^(?P<filename>.+):(?P<line>\d+):(?P<col>\d+): (?P<message>.*)$'

    def build_args(self, settings):
        """Return args, transforming --ignore, --only, and --globals args into a format luacheck understands."""
        args = super().build_args(settings)

        for arg in ('--ignore', '--only', '--globals'):
            try:
                index = args.index(arg)
                values = args[index + 1].split(',')
                args[index + 1:index + 2] = values
            except ValueError:
                pass

        return args
Fix a pydocstyle warning in .travis.yml
Fix a pydocstyle warning in .travis.yml
Python
mit
SublimeLinter/SublimeLinter-luacheck
130b6d47e95c2e538cd5842f6f2f2a88fd9bf9dd
djangocms_forms/cms_app.py
djangocms_forms/cms_app.py
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool


class DjangoCMSFormsApphook(CMSApp):
    name = _('Django CMS Forms')
    urls = ['djangocms_forms.urls']

apphook_pool.register(DjangoCMSFormsApphook)
from __future__ import unicode_literals

from django.utils.translation import ugettext_lazy as _

from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool


class DjangoCMSFormsApphook(CMSApp):
    name = _('Forms')
    urls = ['djangocms_forms.urls']

apphook_pool.register(DjangoCMSFormsApphook)
Rename `CMSApp` to Forms — make it consistent to Django verbose app name.
Rename `CMSApp` to Forms — make it consistent to Django verbose app name.
Python
bsd-3-clause
mishbahr/djangocms-forms,mishbahr/djangocms-forms,mishbahr/djangocms-forms
0aa5540cef1e3137147cd379eaffc98208b78595
copy-labels.py
copy-labels.py
#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code != 200:
            print(r.status_code)
            print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)

    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)

    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code == 201:
            print("Copied {}".format(label['name']))
            continue
        if r.status_code == 422 and r.json()['errors'][0]['code'] == 'already_exists':
            continue
        print(r.status_code)
        print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)

    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)

    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
Make copying labels less verbose when things are fine.:
Make copying labels less verbose when things are fine.:
Python
apache-2.0
edx/repo-tools,edx/repo-tools
#!/usr/bin/env python """Copy tags from one repo to others.""" from __future__ import print_function import json import requests import yaml from helpers import paginated_get LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels" def get_labels(owner_repo): url = LABELS_URL.format(owner_repo=owner_repo) labels = paginated_get(url) labels = list(labels) for label in labels: del label['url'] return labels def set_labels(owner_repo, labels): for label in labels: url = LABELS_URL.format(owner_repo=owner_repo) r = requests.post(url, data=json.dumps(label)) if r.status_code != 200: print(r.status_code) print(r.text) def copy_labels(source_owner_repo): labels = get_labels(source_owner_repo) with open("repos.yaml") as repos_file: REPO_INFO = yaml.load(repos_file) for owner_repo in sorted(REPO_INFO): if owner_repo == source_owner_repo: continue print("Copying labels into {}".format(owner_repo)) set_labels(owner_repo, labels) if __name__ == "__main__": copy_labels("edx/edx-platform") Make copying labels less verbose when things are fine.:
#!/usr/bin/env python """Copy tags from one repo to others.""" from __future__ import print_function import json import requests import yaml from helpers import paginated_get LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels" def get_labels(owner_repo): url = LABELS_URL.format(owner_repo=owner_repo) labels = paginated_get(url) labels = list(labels) for label in labels: del label['url'] return labels def set_labels(owner_repo, labels): for label in labels: url = LABELS_URL.format(owner_repo=owner_repo) r = requests.post(url, data=json.dumps(label)) if r.status_code == 201: print("Copied {}".format(label['name'])) continue if r.status_code == 422 and r.json()['errors'][0]['code'] == 'already_exists': continue print(r.status_code) print(r.text) def copy_labels(source_owner_repo): labels = get_labels(source_owner_repo) with open("repos.yaml") as repos_file: REPO_INFO = yaml.load(repos_file) for owner_repo in sorted(REPO_INFO): if owner_repo == source_owner_repo: continue print("Copying labels into {}".format(owner_repo)) set_labels(owner_repo, labels) if __name__ == "__main__": copy_labels("edx/edx-platform")
<commit_before>#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code != 200:
            print(r.status_code)
            print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
<commit_msg>Make copying labels less verbose when things are fine.:<commit_after>
#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code == 201:
            print("Copied {}".format(label['name']))
            continue
        if r.status_code == 422 and r.json()['errors'][0]['code'] == 'already_exists':
            continue
        print(r.status_code)
        print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code != 200:
            print(r.status_code)
            print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
Make copying labels less verbose when things are fine.:#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code == 201:
            print("Copied {}".format(label['name']))
            continue
        if r.status_code == 422 and r.json()['errors'][0]['code'] == 'already_exists':
            continue
        print(r.status_code)
        print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
<commit_before>#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code != 200:
            print(r.status_code)
            print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
<commit_msg>Make copying labels less verbose when things are fine.:<commit_after>#!/usr/bin/env python
"""Copy tags from one repo to others."""

from __future__ import print_function

import json

import requests
import yaml

from helpers import paginated_get

LABELS_URL = "https://api.github.com/repos/{owner_repo}/labels"


def get_labels(owner_repo):
    url = LABELS_URL.format(owner_repo=owner_repo)
    labels = paginated_get(url)
    labels = list(labels)
    for label in labels:
        del label['url']
    return labels


def set_labels(owner_repo, labels):
    for label in labels:
        url = LABELS_URL.format(owner_repo=owner_repo)
        r = requests.post(url, data=json.dumps(label))
        if r.status_code == 201:
            print("Copied {}".format(label['name']))
            continue
        if r.status_code == 422 and r.json()['errors'][0]['code'] == 'already_exists':
            continue
        print(r.status_code)
        print(r.text)


def copy_labels(source_owner_repo):
    labels = get_labels(source_owner_repo)
    with open("repos.yaml") as repos_file:
        REPO_INFO = yaml.load(repos_file)
    for owner_repo in sorted(REPO_INFO):
        if owner_repo == source_owner_repo:
            continue
        print("Copying labels into {}".format(owner_repo))
        set_labels(owner_repo, labels)


if __name__ == "__main__":
    copy_labels("edx/edx-platform")
110c362e3e8436700707c2306d115b3b2476a79d
core/models.py
core/models.py
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings

from social_auth.signals import socialauth_registered


def create_profile(sender, user, response, details, **kwargs):
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))


socialauth_registered.connect(create_profile, sender=None)
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver

from social_auth.signals import socialauth_registered


class Account(models.Model):
    """
    A user's account balance.
    """

    user = models.OneToOneField(User)
    balance = models.IntegerField(default=5000)


@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
    """
    Create an initial account balance for new users.
    """
    Account.objects.get_or_create(user=kwargs["instance"])


@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
    """
    Download the user's Twitter or Facebook avatar once they've
    authenticated via either service.
    """
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))
Add initial account balance for users.
Add initial account balance for users.
Python
bsd-2-clause
stephenmcd/gamblor,stephenmcd/gamblor
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings

from social_auth.signals import socialauth_registered


def create_profile(sender, user, response, details, **kwargs):
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))


socialauth_registered.connect(create_profile, sender=None)
Add initial account balance for users.
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver

from social_auth.signals import socialauth_registered


class Account(models.Model):
    """
    A user's account balance.
    """

    user = models.OneToOneField(User)
    balance = models.IntegerField(default=5000)


@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
    """
    Create an initial account balance for new users.
    """
    Account.objects.get_or_create(user=kwargs["instance"])


@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
    """
    Download the user's Twitter or Facebook avatar once they've
    authenticated via either service.
    """
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))
<commit_before>
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings

from social_auth.signals import socialauth_registered


def create_profile(sender, user, response, details, **kwargs):
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))


socialauth_registered.connect(create_profile, sender=None)
<commit_msg>Add initial account balance for users.<commit_after>
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver

from social_auth.signals import socialauth_registered


class Account(models.Model):
    """
    A user's account balance.
    """

    user = models.OneToOneField(User)
    balance = models.IntegerField(default=5000)


@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
    """
    Create an initial account balance for new users.
    """
    Account.objects.get_or_create(user=kwargs["instance"])


@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
    """
    Download the user's Twitter or Facebook avatar once they've
    authenticated via either service.
    """
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings

from social_auth.signals import socialauth_registered


def create_profile(sender, user, response, details, **kwargs):
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))


socialauth_registered.connect(create_profile, sender=None)
Add initial account balance for users.
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver

from social_auth.signals import socialauth_registered


class Account(models.Model):
    """
    A user's account balance.
    """

    user = models.OneToOneField(User)
    balance = models.IntegerField(default=5000)


@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
    """
    Create an initial account balance for new users.
    """
    Account.objects.get_or_create(user=kwargs["instance"])


@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
    """
    Download the user's Twitter or Facebook avatar once they've
    authenticated via either service.
    """
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))
<commit_before>
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings

from social_auth.signals import socialauth_registered


def create_profile(sender, user, response, details, **kwargs):
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))


socialauth_registered.connect(create_profile, sender=None)
<commit_msg>Add initial account balance for users.<commit_after>
from os import makedirs
from os.path import join, exists
from urllib import urlretrieve

from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save
from django.dispatch import receiver

from social_auth.signals import socialauth_registered


class Account(models.Model):
    """
    A user's account balance.
    """

    user = models.OneToOneField(User)
    balance = models.IntegerField(default=5000)


@receiver(post_save, sender=User)
def user_saved(sender, **kwargs):
    """
    Create an initial account balance for new users.
    """
    Account.objects.get_or_create(user=kwargs["instance"])


@receiver(socialauth_registered, sender=None)
def avatar(sender, user, response, details, **kwargs):
    """
    Download the user's Twitter or Facebook avatar once they've
    authenticated via either service.
    """
    try:
        # twitter
        photo_url = response["profile_image_url"]
        photo_url = "_reasonably_small".join(photo_url.rsplit("_normal", 1))
    except KeyError:
        # facebook
        photo_url = "http://graph.facebook.com/%s/picture?type=large" % response["id"]
    path = join(settings.MEDIA_ROOT, "photos")
    if not exists(path):
        makedirs(path)
    urlretrieve(photo_url, join(path, str(user.id)))
6be57a38751e42c9544e29168db05cba611acbb1
payments/management/commands/init_plans.py
payments/management/commands/init_plans.py
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    trial_period_days=settings.PAYMENTS_PLANS[plan].get("trial_period_days"),
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
Add trial period days option to initial plans.
Add trial period days option to initial plans.
Python
mit
aibon/django-stripe-payments,grue/django-stripe-payments,jawed123/django-stripe-payments,grue/django-stripe-payments,wahuneke/django-stripe-payments,boxysean/django-stripe-payments,jamespacileo/django-stripe-payments,boxysean/django-stripe-payments,pinax/django-stripe-payments,ZeevG/django-stripe-payments,jawed123/django-stripe-payments,ZeevG/django-stripe-payments,crehana/django-stripe-payments,adi-li/django-stripe-payments,crehana/django-stripe-payments,wahuneke/django-stripe-payments,alexhayes/django-stripe-payments,wahuneke/django-stripe-payments,alexhayes/django-stripe-payments,adi-li/django-stripe-payments,aibon/django-stripe-payments,jamespacileo/django-stripe-payments
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
Add trial period days option to initial plans.
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    trial_period_days=settings.PAYMENTS_PLANS[plan].get("trial_period_days"),
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
<commit_before>import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
<commit_msg>Add trial period days option to initial plans.<commit_after>
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    trial_period_days=settings.PAYMENTS_PLANS[plan].get("trial_period_days"),
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
Add trial period days option to initial plans.import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    trial_period_days=settings.PAYMENTS_PLANS[plan].get("trial_period_days"),
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
<commit_before>import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
<commit_msg>Add trial period days option to initial plans.<commit_after>import decimal

from django.conf import settings
from django.core.management.base import BaseCommand

import stripe


class Command(BaseCommand):
    help = "Make sure your Stripe account has the plans"

    def handle(self, *args, **options):
        stripe.api_key = settings.STRIPE_SECRET_KEY
        for plan in settings.PAYMENTS_PLANS:
            if settings.PAYMENTS_PLANS[plan].get("stripe_plan_id"):
                price = settings.PAYMENTS_PLANS[plan]["price"]
                if isinstance(price, decimal.Decimal):
                    amount = int(100 * price)
                else:
                    amount = int(100 * decimal.Decimal(str(price)))
                stripe.Plan.create(
                    amount=amount,
                    interval=settings.PAYMENTS_PLANS[plan]["interval"],
                    name=settings.PAYMENTS_PLANS[plan]["name"],
                    currency=settings.PAYMENTS_PLANS[plan]["currency"],
                    trial_period_days=settings.PAYMENTS_PLANS[plan].get("trial_period_days"),
                    id=settings.PAYMENTS_PLANS[plan].get("stripe_plan_id")
                )
                print "Plan created for {0}".format(plan)
2768f7ac50a7b91d984f0f872b647e647d768e93
IPython/lib/tests/test_security.py
IPython/lib/tests/test_security.py
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)
# coding: utf-8
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)


def test_passwd_check_unicode():
    # GH issue #4524
    phash = u'sha1:9dc18846ca26:6bb62badc41fde529c258a8a7fbe259a91313df8'
    assert passwd_check(phash, u'mypassword³')
Add failing (on Py 2) test for passwd_check with unicode arguments
Add failing (on Py 2) test for passwd_check with unicode arguments
Python
bsd-3-clause
ipython/ipython,ipython/ipython
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)
Add failing (on Py 2) test for passwd_check with unicode arguments
# coding: utf-8
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)


def test_passwd_check_unicode():
    # GH issue #4524
    phash = u'sha1:9dc18846ca26:6bb62badc41fde529c258a8a7fbe259a91313df8'
    assert passwd_check(phash, u'mypassword³')
<commit_before>from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)
<commit_msg>Add failing (on Py 2) test for passwd_check with unicode arguments<commit_after>
# coding: utf-8
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)


def test_passwd_check_unicode():
    # GH issue #4524
    phash = u'sha1:9dc18846ca26:6bb62badc41fde529c258a8a7fbe259a91313df8'
    assert passwd_check(phash, u'mypassword³')
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)
Add failing (on Py 2) test for passwd_check with unicode arguments# coding: utf-8
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)


def test_passwd_check_unicode():
    # GH issue #4524
    phash = u'sha1:9dc18846ca26:6bb62badc41fde529c258a8a7fbe259a91313df8'
    assert passwd_check(phash, u'mypassword³')
<commit_before>from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)
<commit_msg>Add failing (on Py 2) test for passwd_check with unicode arguments<commit_after># coding: utf-8
from IPython.lib import passwd
from IPython.lib.security import passwd_check, salt_len

import nose.tools as nt


def test_passwd_structure():
    p = passwd('passphrase')
    algorithm, salt, hashed = p.split(':')
    nt.assert_equal(algorithm, 'sha1')
    nt.assert_equal(len(salt), salt_len)
    nt.assert_equal(len(hashed), 40)


def test_roundtrip():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, 'passphrase'), True)


def test_bad():
    p = passwd('passphrase')
    nt.assert_equal(passwd_check(p, p), False)
    nt.assert_equal(passwd_check(p, 'a:b:c:d'), False)
    nt.assert_equal(passwd_check(p, 'a:b'), False)


def test_passwd_check_unicode():
    # GH issue #4524
    phash = u'sha1:9dc18846ca26:6bb62badc41fde529c258a8a7fbe259a91313df8'
    assert passwd_check(phash, u'mypassword³')
090180470c031967f11870b7a101e1f619a17072
src/ggrc_basic_permissions/roles/ProgramAuditReader.py
src/ggrc_basic_permissions/roles/ProgramAuditReader.py
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Snapshot",
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
Add support for reading snapshots for program audit reader
Add support for reading snapshots for program audit reader
Python
apache-2.0
VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
Add support for reading snapshots for program audit reader
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Snapshot",
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
<commit_before># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
<commit_msg>Add support for reading snapshots for program audit reader<commit_after>
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Snapshot",
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
Add support for reading snapshots for program audit reader# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Snapshot",
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
<commit_before># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
<commit_msg>Add support for reading snapshots for program audit reader<commit_after># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>

scope = "AuditImplied"
description = """
    A user with the ProgramReader role for a private program will also have
    this role in the audit context for any audit created for that program.
    """
permissions = {
    "read": [
        "Snapshot",
        "Request",
        "Comment",
        "Assessment",
        "Issue",
        "Audit",
        "AuditObject",
        "Meeting",
        "ObjectDocument",
        "ObjectPerson",
        "Relationship",
        "Document",
        "Meeting",
        "UserRole",
        "Context",
    ],
    "create": [],
    "view_object_page": [
        "__GGRC_ALL__"
    ],
    "update": [],
    "delete": []
}
7618cedbc057b2359f5bc9a1b2479c8287b2d64d
desertbot/datastore.py
desertbot/datastore.py
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __contains__(self, key):
        return key in self.data
Add setitem and contains to DataStore
Add setitem and contains to DataStore

Adding new things to the DataStore now actually works >_>
Python
mit
DesertBot/DesertBot
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
Add setitem and contains to DataStore

Adding new things to the DataStore now actually works >_>
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __contains__(self, key):
        return key in self.data
<commit_before>import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
<commit_msg>Add setitem and contains to DataStore

Adding new things to the DataStore now actually works >_><commit_after>
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __contains__(self, key):
        return key in self.data
import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
Add setitem and contains to DataStore

Adding new things to the DataStore now actually works >_>import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __contains__(self, key):
        return key in self.data
<commit_before>import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]
<commit_msg>Add setitem and contains to DataStore

Adding new things to the DataStore now actually works >_><commit_after>import json
import os


class DataStore(object):
    def __init__(self, storagePath="desertbot_data.json"):
        self.storagePath = storagePath
        self.data = {}
        self.load()

    def load(self):
        if not os.path.exists(self.storagePath):
            self.save()
            return
        with open(self.storagePath) as storageFile:
            self.data = json.load(storageFile)

    def save(self):
        tmpFile = "{}.tmp".format(self.storagePath)
        with open(tmpFile, "w") as storageFile:
            storageFile.write(json.dumps(self.data, indent=4))
        os.rename(tmpFile, self.storagePath)

    def __len__(self):
        return len(self.data)

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, item):
        return self.data[item]

    def __setitem__(self, key, value):
        self.data[key] = value

    def __contains__(self, key):
        return key in self.data
a7e627e60b67f74e1799a272436d58e2eb925e82
inventory_control/database/components.py
inventory_control/database/components.py
""" So this is where all the SQL commands for the Component Stuff exists """ CREATE_SQL = """ CREATE TABLE components ( id INT PRIMARY KEY AUTO_INCREMENT, sku TEXT, type INT, status INT ); CREATE TABLE component_type ( id INT PRIMARY KEY AUTO_INCREMENT, type TEXT ); """ SELECT_ALL_COMPONENTS = """ SELECT * FROM components INNER JOIN component_type ON components.type = component_type.id; """ DROP_SQL = """ DROP TABLE components; DROP TABLE component_type; """
Move all SQL commands to one set of files
Move all SQL commands to one set of files
Python
mit
worldcomputerxchange/inventory-control,codeforsanjose/inventory-control
""" So this is where all the SQL commands for the Component Stuff exists """ CREATE_SQL = """ CREATE TABLE components ( id INT PRIMARY KEY AUTO_INCREMENT, sku TEXT, type INT, status INT ); CREATE TABLE component_type ( id INT PRIMARY KEY AUTO_INCREMENT, type TEXT ); """ SELECT_ALL_COMPONENTS = """ SELECT * FROM components INNER JOIN component_type ON components.type = component_type.id; """ DROP_SQL = """ DROP TABLE components; DROP TABLE component_type; """ Move all SQL commands to one set of files
<commit_before>""" So this is where all the SQL commands for the Component Stuff exists """ CREATE_SQL = """ CREATE TABLE components ( id INT PRIMARY KEY AUTO_INCREMENT, sku TEXT, type INT, status INT ); CREATE TABLE component_type ( id INT PRIMARY KEY AUTO_INCREMENT, type TEXT ); """ SELECT_ALL_COMPONENTS = """ SELECT * FROM components INNER JOIN component_type ON components.type = component_type.id; """ DROP_SQL = """ DROP TABLE components; DROP TABLE component_type; """ <commit_msg>Move all SQL commands to one set of files<commit_after>
""" So this is where all the SQL commands for the Component Stuff exists """ CREATE_SQL = """ CREATE TABLE components ( id INT PRIMARY KEY AUTO_INCREMENT, sku TEXT, type INT, status INT ); CREATE TABLE component_type ( id INT PRIMARY KEY AUTO_INCREMENT, type TEXT ); """ SELECT_ALL_COMPONENTS = """ SELECT * FROM components INNER JOIN component_type ON components.type = component_type.id; """ DROP_SQL = """ DROP TABLE components; DROP TABLE component_type; """ Move all SQL commands to one set of files
<commit_before>""" So this is where all the SQL commands for the Component Stuff exists """ CREATE_SQL = """ CREATE TABLE components ( id INT PRIMARY KEY AUTO_INCREMENT, sku TEXT, type INT, status INT ); CREATE TABLE component_type ( id INT PRIMARY KEY AUTO_INCREMENT, type TEXT ); """ SELECT_ALL_COMPONENTS = """ SELECT * FROM components INNER JOIN component_type ON components.type = component_type.id; """ DROP_SQL = """ DROP TABLE components; DROP TABLE component_type; """ <commit_msg>Move all SQL commands to one set of files<commit_after>
1475ae4f18094e047d0b110fe6526f044defa058
manage.py
manage.py
#!/usr/bin/env python import os import sys import dotenv dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys import warnings import dotenv with warnings.catch_warnings(): warnings.simplefilter("ignore") dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Disable the warning about .env files not being present.
Disable the warning about .env files not being present.
Python
mit
OpenCanada/website,OpenCanada/website,OpenCanada/website,OpenCanada/website
#!/usr/bin/env python import os import sys import dotenv dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) Disable the warning about .env files not being present.
#!/usr/bin/env python import os import sys import warnings import dotenv with warnings.catch_warnings(): warnings.simplefilter("ignore") dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
<commit_before>#!/usr/bin/env python import os import sys import dotenv dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) <commit_msg>Disable the warning about .env files not being present.<commit_after>
#!/usr/bin/env python import os import sys import warnings import dotenv with warnings.catch_warnings(): warnings.simplefilter("ignore") dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
#!/usr/bin/env python import os import sys import dotenv dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) Disable the warning about .env files not being present.#!/usr/bin/env python import os import sys import warnings import dotenv with warnings.catch_warnings(): warnings.simplefilter("ignore") dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
<commit_before>#!/usr/bin/env python import os import sys import dotenv dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) <commit_msg>Disable the warning about .env files not being present.<commit_after>#!/usr/bin/env python import os import sys import warnings import dotenv with warnings.catch_warnings(): warnings.simplefilter("ignore") dotenv.read_dotenv() if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "opencanada.settings") from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
b3bde3bf3eecaf20c7fb8ed2bcf34992a5158965
fabric_rundeck/__main__.py
fabric_rundeck/__main__.py
# # Author:: Noah Kantrowitz <noah@coderanger.net> # # Copyright 2014, Noah Kantrowitz # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import json import sys from .visitor import visit_fabfile def main(): json.dump(visit_fabfile(), sys.stdout, indent=2 if sys.stdout.isatty() else None) if __name__ == '__main__': main()
#
# Author:: Noah Kantrowitz <noah@coderanger.net>
#
# Copyright 2014, Noah Kantrowitz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import json
import sys

from .visitor import visit_fabfile


def main():
    json.dump(visit_fabfile(), sys.stdout, indent=2 if sys.stdout.isatty() else None)


if __name__ == '__main__':
    main()
Allow passing in a path to a fabfile.
Allow passing in a path to a fabfile.
Python
apache-2.0
coderanger/fabric-rundeck
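The new signature reads an optional fabfile path as the first positional argument. A self-contained sketch of the same argv-and-JSON plumbing, with a stub standing in for visit_fabfile():

import json
import sys

def visit_fabfile_stub(path=None):
    # Stand-in for the real visitor; just echoes which fabfile was used.
    return [{"task": "deploy", "fabfile": path or "fabfile.py"}]

def main(args):
    path = args[0] if args else None   # optional positional argument
    # Pretty-print for a terminal, compact JSON when piped elsewhere.
    json.dump(visit_fabfile_stub(path), sys.stdout,
              indent=2 if sys.stdout.isatty() else None)

if __name__ == '__main__':
    main(sys.argv[1:])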
76f697cad3e5dcaeb91c10e6c5f21db23ed8e407
helpdesk/tasks.py
helpdesk/tasks.py
from celery import task

from .email import process_email


@task()
def helpdesk_process_email():
    process_email()
from celery.decorators import task

from .email import process_email


@task()
def helpdesk_process_email():
    process_email()
Fix import of celery decorator "task".
Fix import of celery decorator "task".
Python
bsd-3-clause
django-helpdesk/django-helpdesk,gwasser/django-helpdesk,rossp/django-helpdesk,rossp/django-helpdesk,rossp/django-helpdesk,rossp/django-helpdesk,gwasser/django-helpdesk,gwasser/django-helpdesk,django-helpdesk/django-helpdesk,django-helpdesk/django-helpdesk,gwasser/django-helpdesk,django-helpdesk/django-helpdesk
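celery.decorators was the decorator's home in older Celery releases, so which import works depends on the installed version. A hedged compatibility shim; the version notes are assumptions about old Celery, and current Celery uses app.task or shared_task instead:

try:
    from celery.decorators import task   # location in older Celery releases
except ImportError:
    from celery import task              # fallback for versions without it

@task()
def add(x, y):
    # Trivial task body; a real project would call application code here.
    return x + y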
ac2e251f165c4d8a11fe65bbfbf1562ea2020e97
docs/dummy-settings.py
docs/dummy-settings.py
DATABASES = {
    "default": {
        "NAME": ":memory:",
        "ENGINE": "django.db.backends.sqlite3",
    }
}
DATABASES = {
    "default": {
        "NAME": ":memory:",
        "ENGINE": "django.db.backends.sqlite3",
    }
}

SECRET_KEY = "NOT SECRET"
Fix the exception about the missing secret key when generating docs.
Fix the exception about the missing secret key when generating docs.
Python
bsd-3-clause
littleweaver/django-daguerre,mislavcimpersak/django-daguerre,Styria-Digital/django-daguerre,littleweaver/django-daguerre,mislavcimpersak/django-daguerre,Styria-Digital/django-daguerre
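For one-off scripts and doc builds, Django also lets you supply the same settings programmatically through settings.configure(), which avoids a settings module entirely. A minimal sketch, assuming Django is installed:

import django
from django.conf import settings

settings.configure(
    DATABASES={
        "default": {"NAME": ":memory:", "ENGINE": "django.db.backends.sqlite3"}
    },
    SECRET_KEY="NOT SECRET",  # any non-empty value satisfies the check
)
django.setup()

print(settings.SECRET_KEY)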
2d0a5f56acf9035f883849e780fe36f7534a4251
TWLight/ezproxy/urls.py
TWLight/ezproxy/urls.py
from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from . import views

urlpatterns = [
    url(r'^u/(?P<url>http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_u'
    ),
    url(r'^r/(?P<token>(ezp\.[a-zA-Z]|[0-9]|[$-_@.&+])+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_r'
    ),
]
from django.conf.urls import url
from django.contrib.auth.decorators import login_required

from . import views

urlpatterns = [
    url(r'^u/(?P<url>http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_u'
    ),
    url(r'^r/(?P<token>ezp\.([a-zA-Z]|[0-9]|[$-_@.&+])+)$',
        login_required(views.EZProxyAuth.as_view()),
        name='ezproxy_auth_r'
    ),
]
Use a more precise pattern to identify ^R ezproxy URL tokens.
Use a more precise pattern to identify ^R ezproxy URL tokens.
Python
mit
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
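Moving ezp\. out of the alternation means the whole token must start with that literal prefix, instead of the prefix being just one of several repeatable branches. A quick check with anchored stand-ins for the two URL patterns (the sample tokens are made up):

import re

before = re.compile(r'^(ezp\.[a-zA-Z]|[0-9]|[$-_@.&+])+$')   # old branch layout
after  = re.compile(r'^ezp\.([a-zA-Z]|[0-9]|[$-_@.&+])+$')   # literal prefix first

for token in ('ezp.A123', '123.ABC'):
    print(token, bool(before.match(token)), bool(after.match(token)))
# ezp.A123 True True    (a real-looking token passes both)
# 123.ABC True False    (no "ezp." prefix, yet the old pattern accepted it)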
bef9fb7f778666e602bfc5b27a65888f7459d0f9
blog/forms.py
blog/forms.py
from .models import BlogPost, BlogComment
from django.forms import ModelForm


class BlogPostForm(ModelForm):
    class Meta:
        model = BlogPost
        exclude = ('user',)

    def save(self, user, commit=True):
        post = super(BlogPostForm, self).save(commit=False)
        post.user = user

        if commit:
            post.save()

        return post


class CommentForm(ModelForm):
    class Meta:
        model = BlogComment
        exclude = ('post', 'user',)
from .models import BlogPost, BlogComment
from django.forms import ModelForm


class BlogPostForm(ModelForm):
    class Meta:
        model = BlogPost
        exclude = ('user',)

    def save(self, user, commit=True):
        post = super(BlogPostForm, self).save(commit=False)
        post.user = user

        if commit:
            post.save()

        return post


class CommentForm(ModelForm):
    class Meta:
        model = BlogComment
        exclude = ('post', 'user',)

    def save(self, user, post, commit=True):
        comment = super(CommentForm, self).save(commit=False)
        comment.user = user
        comment.post = post

        if commit:
            comment.save()

        return comment
Add a custom save() method to CommentForm
Add a custom save() method to CommentForm
Python
mit
andreagrandi/bloggato,andreagrandi/bloggato
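The commit=False idiom lets the form build the instance while the caller supplies the fields the form excludes. A runnable plain-Python sketch of the same flow, with a fake model standing in for the Django machinery:

class FakeComment:
    # Minimal stand-in for a Django model instance.
    saved = False

    def save(self):
        self.saved = True

class CommentFormSketch:
    def save(self, user, post, commit=True):
        comment = FakeComment()   # stands in for super().save(commit=False)
        comment.user = user       # fields excluded from the form ...
        comment.post = post       # ... get attached from the caller's data
        if commit:
            comment.save()
        return comment

c = CommentFormSketch().save(user='alice', post='first-post', commit=False)
print(c.user, c.post, c.saved)   # alice first-post False
c.save()
print(c.saved)                   # True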
34072121b9fc6d1b0ec740cb3d22034971ef0141
comics/search/urls.py
comics/search/urls.py
from django.conf.urls.defaults import *

urlpatterns = patterns('',
    (r'^', include('haystack.urls')),
)
from django.conf.urls.defaults import *

from haystack.views import SearchView
from haystack.forms import SearchForm

urlpatterns = patterns('',
    url(r'^$', SearchView(form_class=SearchForm), name='haystack_search'),
)
Convert to simpler search form
Convert to simpler search form
Python
agpl-3.0
datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,datagutten/comics,jodal/comics,klette/comics
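SearchView is instantiated directly in the URLconf here, so its other options can be set the same way, as constructor arguments. A sketch assuming the same Haystack-1.x-era API as the commit (the template path is illustrative):

from haystack.views import SearchView
from haystack.forms import SearchForm

search_view = SearchView(
    form_class=SearchForm,        # plain form, no per-model filtering
    results_per_page=20,
    template='search/search.html',
)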
09fe8731e733081fff2595b04db63b93d0f4b91b
spider.py
spider.py
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem


class DatasetSpider(CrawlSpider):
    name = 'dataset'
    allowed_domains = ['data.gc.ca/data/en']
    start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
    rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']), 'parse_dataset')]

    def parse_dataset(self, response):
        sel = Selector(response)
        dataset = DatasetItem()
from scrapy.contrib.spiders import CrawlSpider, Rule
from scrapy.contrib.linkextractors.sgml import SgmlLinkExtractor
from scrapy.selector import Selector
from dataset import DatasetItem


class DatasetSpider(CrawlSpider):
    name = 'dataset'
    allowed_domains = ['data.gc.ca/data/en']
    start_urls = ['http://data.gc.ca/data/en/dataset?page=1']
    rules = [Rule(SgmlLinkExtractor(allow=['/dataset/[0-9a-z]{8}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{4}-[0-9a-z]{12}']), 'parse_dataset')]

    def parse_dataset(self, response):
        sel = Selector(response)
        dataset = DatasetItem()
        dataset['url'] = response.url
Add url assignment to each crawled dataset item
Add url assignment to each crawled dataset item
Python
mit
MaxLikelihood/CODE
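The spider imports DatasetItem, but its definition is not part of this commit. A plausible shape for it, where every field name beyond url is an assumption:

import scrapy

class DatasetItem(scrapy.Item):
    url = scrapy.Field()          # populated from response.url in parse_dataset
    title = scrapy.Field()        # assumed field
    description = scrapy.Field()  # assumed field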
94e6443a3eeb1bf76121ab2030a90c5631f32ff8
landscapesim/serializers/regions.py
landscapesim/serializers/regions.py
import json

from rest_framework import serializers
from django.core.urlresolvers import reverse

from landscapesim.models import Region


class ReportingUnitSerializer(serializers.Serializer):
    type = serializers.SerializerMethodField()
    properties = serializers.SerializerMethodField()
    geometry = serializers.SerializerMethodField()

    class Meta:
        fields = ('type', 'geometry', 'properties',)

    def get_type(self, obj):
        return 'Feature'

    def get_geometry(self, obj):
        return json.loads(obj.polygon.json)

    def get_properties(self, obj):
        return {
            'id': obj.id,
            'unit_id': obj.unit_id,
            'name': obj.name
        }


class RegionSerializer(serializers.ModelSerializer):
    url = serializers.SerializerMethodField()

    class Meta:
        model = Region
        fields = ('id', 'name', 'url')

    def get_url(self, obj):
        return reverse('region-reporting-units', args=[obj.id])
import json

from rest_framework import serializers
from django.core.urlresolvers import reverse

from landscapesim.models import Region


class ReportingUnitSerializer(serializers.Serializer):
    type = serializers.SerializerMethodField()
    properties = serializers.SerializerMethodField()
    geometry = serializers.SerializerMethodField()

    class Meta:
        fields = ('type', 'geometry', 'properties',)

    def get_type(self, obj):
        return 'Feature'

    def get_geometry(self, obj):
        return json.loads(obj.polygon.json)

    def get_properties(self, obj):
        return {
            'id': obj.id,
            'unit_id': obj.unit_id,
            'name': obj.name
        }


class RegionSerializer(serializers.ModelSerializer):
    url = serializers.SerializerMethodField()
    data = serializers.SerializerMethodField()

    class Meta:
        model = Region
        fields = ('id', 'name', 'url', 'data',)

    def get_url(self, obj):
        return reverse('region-reporting-units', args=[obj.id])

    def get_data(self, obj):
        if self.context.get('request').GET.get('return_data') == 'true':
            return ReportingUnitSerializer(obj.reporting_units.all(), many=True).data
        return None
Allow data to be returned on the same request.
Allow data to be returned on the same request.
Python
bsd-3-clause
consbio/landscapesim,consbio/landscapesim,consbio/landscapesim
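get_data() reaches into self.context for the request, which DRF's generic views inject automatically. When instantiating the serializer by hand, the context must be passed explicitly, otherwise context.get('request') returns None and the GET lookup raises. A fragment illustrating this, where region and request are assumed to exist:

serializer = RegionSerializer(
    region,
    context={'request': request},   # required: get_data() reads request.GET
)
payload = serializer.data           # 'data' is non-null only if ?return_data=true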
856f2328ccae97286b621c736716b37967027be8
tests/views.py
tests/views.py
# coding: utf-8

from __future__ import absolute_import, unicode_literals

from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.template.response import TemplateResponse
from django.shortcuts import render
from django.views.decorators.cache import cache_page


def execute_sql(request):
    list(User.objects.all())
    return HttpResponse()


def regular_view(request, title):
    return render(request, 'basic.html', {'title': title})


def template_response_view(request, title):
    return TemplateResponse(request, 'basic.html', {'title': title})


def new_user(request, username='joe'):
    User.objects.create_user(username=username)
    return render(request, 'basic.html', {'title': 'new user'})


def resolving_view(request, arg1, arg2):
    # see test_url_resolving in tests.py
    return HttpResponse()


@cache_page(60)
def cached_view(request):
    return HttpResponse()


def regular_jinjia_view(request, title):
    return render(request, 'jinja2/basic.jinja', {'title': title})


def listcomp_view(request):
    lst = [i for i in range(50000) if i % 2 == 0]
    return render(request, 'basic.html', {'title': 'List comprehension', 'lst': lst})


def redirect_view(request):
    return HttpResponseRedirect('/regular/redirect/')
# coding: utf-8

from __future__ import absolute_import, unicode_literals

from django.contrib.auth.models import User
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.template.response import TemplateResponse
from django.views.decorators.cache import cache_page


def execute_sql(request):
    list(User.objects.all())
    return HttpResponse()


def regular_view(request, title):
    return render(request, 'basic.html', {'title': title})


def template_response_view(request, title):
    return TemplateResponse(request, 'basic.html', {'title': title})


def new_user(request, username='joe'):
    User.objects.create_user(username=username)
    return render(request, 'basic.html', {'title': 'new user'})


def resolving_view(request, arg1, arg2):
    # see test_url_resolving in tests.py
    return HttpResponse()


@cache_page(60)
def cached_view(request):
    return HttpResponse()


def regular_jinjia_view(request, title):
    return render(request, 'jinja2/basic.jinja', {'title': title})


def listcomp_view(request):
    lst = [i for i in range(50000) if i % 2 == 0]
    return render(request, 'basic.html', {'title': 'List comprehension', 'lst': lst})


def redirect_view(request):
    return HttpResponseRedirect('/regular/redirect/')
Fix an imports ordering problem
Fix an imports ordering problem
Python
bsd-3-clause
tim-schilling/django-debug-toolbar,spookylukey/django-debug-toolbar,tim-schilling/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,tim-schilling/django-debug-toolbar,spookylukey/django-debug-toolbar,spookylukey/django-debug-toolbar,jazzband/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,jazzband/django-debug-toolbar,jazzband/django-debug-toolbar
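The reorder follows the usual alphabetical-within-group convention, the rule that tools like isort enforce: django.shortcuts sorts before django.template. The fixed group, for reference:

from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render                     # "shortcuts" ...
from django.template.response import TemplateResponse   # ... before "template"
from django.views.decorators.cache import cache_page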
f930fc7a23ff1337260a24b54c7b3d2b3d08f0d4
src/survey_stats/state.py
src/survey_stats/state.py
from survey_stats.datasets import SurveyDataset
from survey_stats import log

lgr = log.getLogger(__name__)

dset = {}


def initialize(dbc, cache, init_des, use_feather, init_svy, init_soc):
    lgr.info('was summoned into being, loading up some data',
             dbc=dbc, cache=cache, use_feather=use_feather)
    dset['brfss'] = SurveyDataset.load_dataset('config/data/brfss.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['brfss_pre2011'] = SurveyDataset.load_dataset('config/data/brfss_pre2011.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['yrbss'] = SurveyDataset.load_dataset('config/data/yrbss.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['prams'] = SurveyDataset.load_dataset('config/data/prams.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['prams_p2011'] = SurveyDataset.load_dataset('config/data/prams_p2011.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
from survey_stats.datasets import SurveyDataset
from survey_stats import log

lgr = log.getLogger(__name__)

dset = {}


def initialize(dbc, cache, init_des, use_feather, init_svy, init_soc):
    lgr.info('was summoned into being, loading up some data',
             dbc=dbc, cache=cache, use_feather=use_feather)
    dset['brfss'] = SurveyDataset.load_dataset('config/data/brfss.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['yrbss'] = SurveyDataset.load_dataset('config/data/yrbss.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['prams'] = SurveyDataset.load_dataset('config/data/prams.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
    dset['prams_p2011'] = SurveyDataset.load_dataset('config/data/prams_p2011.yaml', dbc, cache, init_des, use_feather, init_svy, init_soc)
Check if brfss data for years 2000 to 2010 is available
Check if brfss data for years 2000 to 2010 is available
Python
bsd-2-clause
semanticbits/survey_stats,semanticbits/survey_stats
f2066b0f3fd90583d8b80bb8b09b8168b9d29628
kiva/__init__.py
kiva/__init__.py
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from __future__ import absolute_import from .constants import * from .fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from constants import * from fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
Remove the explicit .relative import since the * import does not work with Python 2.5
BUG: Remove the explicit .relative import since the * import does not work with Python 2.5
Python
bsd-3-clause
tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from __future__ import absolute_import from .constants import * from .fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os BUG: Remove the explicit .relative import since the * import does not work with Python 2.5
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from constants import * from fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
<commit_before>#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from __future__ import absolute_import from .constants import * from .fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os <commit_msg>BUG: Remove the explicit .relative import since the * import does not work with Python 2.5<commit_after>
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from constants import * from fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from __future__ import absolute_import from .constants import * from .fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os BUG: Remove the explicit .relative import since the * import does not work with Python 2.5#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from constants import * from fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
<commit_before>#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from __future__ import absolute_import from .constants import * from .fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os <commit_msg>BUG: Remove the explicit .relative import since the * import does not work with Python 2.5<commit_after>#------------------------------------------------------------------------------ # Copyright (c) 2005, Enthought, Inc. # some parts copyright 2002 by Space Telescope Science Institute # All rights reserved. # # This software is provided without warranty under the terms of the BSD # license included in enthought/LICENSE.txt and may be redistributed only # under the conditions described in the aforementioned license. The license # is also available online at http://www.enthought.com/licenses/BSD.txt # Thanks for using Enthought open source! #------------------------------------------------------------------------------ """ A multi-platform DisplayPDF vector drawing engine. Part of the Enable project of the Enthought Tool Suite. """ from constants import * from fonttools import Font import os if os.environ.has_key('KIVA_WISHLIST'): from warnings import warn warn("Use of the KIVA_WISHLIST environment variable to select Kiva backends" "is no longer supported.") del os
5b342d7a2eacea955c851158c8e0e6e33216e780
webcal.py
webcal.py
import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class MainPage(webapp2.RequestHandler): def get(self): result = urlfetch.fetch( 'http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123') calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() filtered_cal.add('prodid', '-//Filtered Arsenal Calendar//foo//') filtered_cal.add('version', '2.0') for component in calendar.subcomponents: if 'LOCATION' in component: if 'Emirates Stadium' in component['LOCATION']: filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.out.write(filtered_cal.to_ical()) app = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True)
import logging import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class CalendarFilterPage(webapp2.RequestHandler): def get(self): calendar_url = self.request.get('url') result = urlfetch.fetch(calendar_url) # http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123 calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() for k, v in calendar.items(): filtered_cal.add(k, v) filter_spec = FilterSpec(self.request.get('filter')) for component in calendar.subcomponents: if filter_spec.ShouldFilter(component): filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.headers.add( 'Content-Disposition', 'attachment; filename="calendar.ical"') self.response.out.write(filtered_cal.to_ical()) class FilterSpec(object): def __init__(self, filter_spec): split = filter_spec.split(':') self.property = split[0] self.content = split[1] def ShouldFilter(self, event): return self.property in event and self.content in event[self.property] app = webapp2.WSGIApplication([ ('/calendar', CalendarFilterPage), ], debug=True)
Make filtering & url generic
Make filtering & url generic
Python
mit
hatstand/webcal-filter
import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class MainPage(webapp2.RequestHandler): def get(self): result = urlfetch.fetch( 'http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123') calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() filtered_cal.add('prodid', '-//Filtered Arsenal Calendar//foo//') filtered_cal.add('version', '2.0') for component in calendar.subcomponents: if 'LOCATION' in component: if 'Emirates Stadium' in component['LOCATION']: filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.out.write(filtered_cal.to_ical()) app = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) Make filtering & url generic
import logging import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class CalendarFilterPage(webapp2.RequestHandler): def get(self): calendar_url = self.request.get('url') result = urlfetch.fetch(calendar_url) # http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123 calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() for k, v in calendar.items(): filtered_cal.add(k, v) filter_spec = FilterSpec(self.request.get('filter')) for component in calendar.subcomponents: if filter_spec.ShouldFilter(component): filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.headers.add( 'Content-Disposition', 'attachment; filename="calendar.ical"') self.response.out.write(filtered_cal.to_ical()) class FilterSpec(object): def __init__(self, filter_spec): split = filter_spec.split(':') self.property = split[0] self.content = split[1] def ShouldFilter(self, event): return self.property in event and self.content in event[self.property] app = webapp2.WSGIApplication([ ('/calendar', CalendarFilterPage), ], debug=True)
<commit_before>import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class MainPage(webapp2.RequestHandler): def get(self): result = urlfetch.fetch( 'http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123') calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() filtered_cal.add('prodid', '-//Filtered Arsenal Calendar//foo//') filtered_cal.add('version', '2.0') for component in calendar.subcomponents: if 'LOCATION' in component: if 'Emirates Stadium' in component['LOCATION']: filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.out.write(filtered_cal.to_ical()) app = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) <commit_msg>Make filtering & url generic<commit_after>
import logging import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class CalendarFilterPage(webapp2.RequestHandler): def get(self): calendar_url = self.request.get('url') result = urlfetch.fetch(calendar_url) # http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123 calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() for k, v in calendar.items(): filtered_cal.add(k, v) filter_spec = FilterSpec(self.request.get('filter')) for component in calendar.subcomponents: if filter_spec.ShouldFilter(component): filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.headers.add( 'Content-Disposition', 'attachment; filename="calendar.ical"') self.response.out.write(filtered_cal.to_ical()) class FilterSpec(object): def __init__(self, filter_spec): split = filter_spec.split(':') self.property = split[0] self.content = split[1] def ShouldFilter(self, event): return self.property in event and self.content in event[self.property] app = webapp2.WSGIApplication([ ('/calendar', CalendarFilterPage), ], debug=True)
import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class MainPage(webapp2.RequestHandler): def get(self): result = urlfetch.fetch( 'http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123') calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() filtered_cal.add('prodid', '-//Filtered Arsenal Calendar//foo//') filtered_cal.add('version', '2.0') for component in calendar.subcomponents: if 'LOCATION' in component: if 'Emirates Stadium' in component['LOCATION']: filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.out.write(filtered_cal.to_ical()) app = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) Make filtering & url genericimport logging import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class CalendarFilterPage(webapp2.RequestHandler): def get(self): calendar_url = self.request.get('url') result = urlfetch.fetch(calendar_url) # http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123 calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() for k, v in calendar.items(): filtered_cal.add(k, v) filter_spec = FilterSpec(self.request.get('filter')) for component in calendar.subcomponents: if filter_spec.ShouldFilter(component): filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.headers.add( 'Content-Disposition', 'attachment; filename="calendar.ical"') self.response.out.write(filtered_cal.to_ical()) class FilterSpec(object): def __init__(self, filter_spec): split = filter_spec.split(':') self.property = split[0] self.content = split[1] def ShouldFilter(self, event): return self.property in event and self.content in event[self.property] app = webapp2.WSGIApplication([ ('/calendar', CalendarFilterPage), ], debug=True)
<commit_before>import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class MainPage(webapp2.RequestHandler): def get(self): result = urlfetch.fetch( 'http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123') calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() filtered_cal.add('prodid', '-//Filtered Arsenal Calendar//foo//') filtered_cal.add('version', '2.0') for component in calendar.subcomponents: if 'LOCATION' in component: if 'Emirates Stadium' in component['LOCATION']: filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.out.write(filtered_cal.to_ical()) app = webapp2.WSGIApplication([ ('/', MainPage), ], debug=True) <commit_msg>Make filtering & url generic<commit_after>import logging import webapp2 from google.appengine.api import urlfetch from google.appengine.ext import vendor vendor.add('lib') import icalendar class CalendarFilterPage(webapp2.RequestHandler): def get(self): calendar_url = self.request.get('url') result = urlfetch.fetch(calendar_url) # http://www.arsenal.com/_scripts/ical.ics?tid=1006&sid=123 calendar = icalendar.Calendar.from_ical(result.content) filtered_cal = icalendar.Calendar() for k, v in calendar.items(): filtered_cal.add(k, v) filter_spec = FilterSpec(self.request.get('filter')) for component in calendar.subcomponents: if filter_spec.ShouldFilter(component): filtered_cal.add_component(component) self.response.content_type = 'text/calendar' self.response.headers.add( 'Cache-Control', 'max-age=3600') self.response.headers.add( 'Content-Disposition', 'attachment; filename="calendar.ical"') self.response.out.write(filtered_cal.to_ical()) class FilterSpec(object): def __init__(self, filter_spec): split = filter_spec.split(':') self.property = split[0] self.content = split[1] def ShouldFilter(self, event): return self.property in event and self.content in event[self.property] app = webapp2.WSGIApplication([ ('/calendar', CalendarFilterPage), ], debug=True)
6b98fb9a8f3e78d5ee0c73707cd253fc94e4756e
src/ggrc/models/exceptions.py
src/ggrc/models/exceptions.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, "(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, u?"(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
Fix exception pattern matching for internal unicode strings
Fix exception pattern matching for internal unicode strings
Python
apache-2.0
josthkko/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,vladan-m/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, "(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass Fix exception pattern matching for internal unicode strings
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, u?"(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
<commit_before># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, "(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass <commit_msg>Fix exception pattern matching for internal unicode strings<commit_after>
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, u?"(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, "(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass Fix exception pattern matching for internal unicode strings# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, u?"(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
<commit_before># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, "(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass <commit_msg>Fix exception pattern matching for internal unicode strings<commit_after># Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: vraj@reciprocitylabs.com # Maintained By: vraj@reciprocitylabs.com import re from sqlalchemy.exc import IntegrityError def translate_message(exception): """ Translates db exceptions to something a user can understand. """ message = exception.message if isinstance(exception, IntegrityError): # TODO: Handle not null, foreign key errors, uniqueness errors with compound keys duplicate_entry_pattern = re.compile(r'\(1062, u?"(Duplicate entry \'[^\']*\')') matches = duplicate_entry_pattern.search(message) if matches: return matches.group(1) else: return message else: return message class ValidationError(Exception): pass
e2438124fc8a645508b22b3713984cdb1288927d
utils/mongo.py
utils/mongo.py
import logging from pymongo import MongoClient from pymongo.errors import DuplicateKeyError from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.client = MongoClient() self.db = self.client.spud self._collections() self.duplicate_error = DuplicateKeyError self.index_error = OperationFailure def _collections(self): self._logger.debug(self.db.collection_names())
import logging from pymongo import MongoClient, database, collection # from pymongo.errors import DuplicateKeyError # from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.db = database.Database(MongoClient(), 'spud') self.print_collections() # self.duplicate_error = DuplicateKeyError # self.index_error = OperationFailure # print all collections def print_collections(self): self._logger.debug(self.db.collection_names()) # return all documents in a collection def fetch_all(self, _collection): return list(collection.Collection(self.db, _collection).find()) # save document to a collection def save(self, _collection, document): collection.Collection(self.db, _collection).save(document)
Make MongoInterface more of an interface
Make MongoInterface more of an interface
Python
mit
spudmind/undertheinfluence,spudmind/undertheinfluence,spudmind/undertheinfluence
import logging from pymongo import MongoClient from pymongo.errors import DuplicateKeyError from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.client = MongoClient() self.db = self.client.spud self._collections() self.duplicate_error = DuplicateKeyError self.index_error = OperationFailure def _collections(self): self._logger.debug(self.db.collection_names()) Make MongoInterface more of an interface
import logging from pymongo import MongoClient, database, collection # from pymongo.errors import DuplicateKeyError # from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.db = database.Database(MongoClient(), 'spud') self.print_collections() # self.duplicate_error = DuplicateKeyError # self.index_error = OperationFailure # print all collections def print_collections(self): self._logger.debug(self.db.collection_names()) # return all documents in a collection def fetch_all(self, _collection): return list(collection.Collection(self.db, _collection).find()) # save document to a collection def save(self, _collection, document): collection.Collection(self.db, _collection).save(document)
<commit_before>import logging from pymongo import MongoClient from pymongo.errors import DuplicateKeyError from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.client = MongoClient() self.db = self.client.spud self._collections() self.duplicate_error = DuplicateKeyError self.index_error = OperationFailure def _collections(self): self._logger.debug(self.db.collection_names()) <commit_msg>Make MongoInterface more of an interface<commit_after>
import logging from pymongo import MongoClient, database, collection # from pymongo.errors import DuplicateKeyError # from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.db = database.Database(MongoClient(), 'spud') self.print_collections() # self.duplicate_error = DuplicateKeyError # self.index_error = OperationFailure # print all collections def print_collections(self): self._logger.debug(self.db.collection_names()) # return all documents in a collection def fetch_all(self, _collection): return list(collection.Collection(self.db, _collection).find()) # save document to a collection def save(self, _collection, document): collection.Collection(self.db, _collection).save(document)
import logging from pymongo import MongoClient from pymongo.errors import DuplicateKeyError from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.client = MongoClient() self.db = self.client.spud self._collections() self.duplicate_error = DuplicateKeyError self.index_error = OperationFailure def _collections(self): self._logger.debug(self.db.collection_names()) Make MongoInterface more of an interfaceimport logging from pymongo import MongoClient, database, collection # from pymongo.errors import DuplicateKeyError # from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.db = database.Database(MongoClient(), 'spud') self.print_collections() # self.duplicate_error = DuplicateKeyError # self.index_error = OperationFailure # print all collections def print_collections(self): self._logger.debug(self.db.collection_names()) # return all documents in a collection def fetch_all(self, _collection): return list(collection.Collection(self.db, _collection).find()) # save document to a collection def save(self, _collection, document): collection.Collection(self.db, _collection).save(document)
<commit_before>import logging from pymongo import MongoClient from pymongo.errors import DuplicateKeyError from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.client = MongoClient() self.db = self.client.spud self._collections() self.duplicate_error = DuplicateKeyError self.index_error = OperationFailure def _collections(self): self._logger.debug(self.db.collection_names()) <commit_msg>Make MongoInterface more of an interface<commit_after>import logging from pymongo import MongoClient, database, collection # from pymongo.errors import DuplicateKeyError # from pymongo.errors import OperationFailure class MongoInterface: def __init__(self): self._logger = logging.getLogger('spud') self.db = database.Database(MongoClient(), 'spud') self.print_collections() # self.duplicate_error = DuplicateKeyError # self.index_error = OperationFailure # print all collections def print_collections(self): self._logger.debug(self.db.collection_names()) # return all documents in a collection def fetch_all(self, _collection): return list(collection.Collection(self.db, _collection).find()) # save document to a collection def save(self, _collection, document): collection.Collection(self.db, _collection).save(document)
2518beaf87feb39d03164d30feff37b6dea2c2ef
tests/integration/states/test_renderers.py
tests/integration/states/test_renderers.py
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import flaky class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) @flaky def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
Mark renderer test as flaky for mac tests
Mark renderer test as flaky for mac tests
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) Mark renderer test as flaky for mac tests
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import flaky class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) @flaky def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
<commit_before># coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) <commit_msg>Mark renderer test as flaky for mac tests<commit_after>
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import flaky class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) @flaky def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) Mark renderer test as flaky for mac tests# coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import flaky class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) @flaky def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
<commit_before># coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) <commit_msg>Mark renderer test as flaky for mac tests<commit_after># coding: utf-8 ''' Integration tests for renderer functions ''' # Import Python Libs from __future__ import absolute_import, unicode_literals, print_function # Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.helpers import flaky class TestJinjaRenderer(ModuleCase): ''' Validate that ordering works correctly ''' def test_dot_notation(self): ''' Test the Jinja dot-notation syntax for calling execution modules ''' ret = self.run_function('state.sls', ['jinja_dot_notation']) for state_ret in ret.values(): self.assertTrue(state_ret['result']) @flaky def test_salt_contains_function(self): ''' Test if we are able to check if a function exists inside the "salt" wrapper (AliasLoader) which is available on Jinja templates. ''' ret = self.run_function('state.sls', ['jinja_salt_contains_function']) for state_ret in ret.values(): self.assertTrue(state_ret['result'])
eac9585634be34bdd1fabd65c6f22eb66134b6b4
datastage/web/user/views.py
datastage/web/user/views.py
from django_conneg.views import HTMLView from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request):
from django_conneg.views import HTMLView #from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request): pass
Fix syntax, even if not functionality.
Fix syntax, even if not functionality.
Python
mit
dataflow/DataStage,dataflow/DataStage,dataflow/DataStage
from django_conneg.views import HTMLView from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request):Fix syntax, even if not functionality.
from django_conneg.views import HTMLView #from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request): pass
<commit_before>from django_conneg.views import HTMLView from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request):<commit_msg>Fix syntax, even if not functionality.<commit_after>
from django_conneg.views import HTMLView #from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request): pass
from django_conneg.views import HTMLView from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request):Fix syntax, even if not functionality.from django_conneg.views import HTMLView #from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request): pass
<commit_before>from django_conneg.views import HTMLView from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request):<commit_msg>Fix syntax, even if not functionality.<commit_after>from django_conneg.views import HTMLView #from django.contrib.auth.forms.PasswordChangeForm class IndexView(HTMLView): def get(self, request): return self.render(request, {}, 'user/index') class PasswordView(HTMLView): def get(self, request): pass
e95deac720589eaf81dbb54cadcef9a3459f7d02
youtube/downloader.py
youtube/downloader.py
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
Add comment explaining why use choose bestaudio for audio downloads.
Add comment explaining why use choose bestaudio for audio downloads.
Python
mit
tpcstld/youtube,tpcstld/youtube,tpcstld/youtube
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title Add comment explaining why use choose bestaudio for audio downloads.
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
<commit_before>import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title <commit_msg>Add comment explaining why use choose bestaudio for audio downloads.<commit_after>
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title Add comment explaining why use choose bestaudio for audio downloads.import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
<commit_before>import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title <commit_msg>Add comment explaining why use choose bestaudio for audio downloads.<commit_after>import os from youtube_dl import YoutubeDL from youtube_dl import MaxDownloadsReached def download(url, audio_only): """Downloads the youtube video from the url Args: url: The youtube URL pointing to the video to download. audio_only: True if we only want to download the best audio. Returns: A (file name, video title) tuple. The file name is ONLY the file name, and does not include the file path. """ downloader = YoutubeDL() downloader.add_default_info_extractors() downloader.params['outtmpl'] = os.path.join(os.getcwd(), 'temp/%(id)s.%(ext)s') downloader.params['verbose'] = True downloader.params['cachedir'] = None downloader.params['noplaylist'] = True downloader.params['max_downloads'] = 1 # We don't really have to do this, but YoutubeDL sometimes has a problem # combining the video and audio portions of webm files, so this is a good # workaround since we really only care about the audio part. if audio_only: downloader.params['format'] = 'bestaudio' try: info = downloader.extract_info(url) except MaxDownloadsReached: info = downloader.extract_info(url, download=False) file_name = downloader.prepare_filename(info) file_name = file_name.encode('ascii', 'ignore') title = info.get('title', os.path.basename(file_name)) return file_name, title
3fa7cd977d250629f0ee619b1e5088872bcb051a
activities/runkeeper/signals.py
activities/runkeeper/signals.py
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return # The UserSocialAuth is created before the whole OAuth2 process is complete if 'access_token' not in instance.extra_data: return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
Fix timing issue in signal
Fix timing issue in signal
Python
mit
OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile) Fix timing issue in signal
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return # The UserSocialAuth is created before the whole OAuth2 process is complete if 'access_token' not in instance.extra_data: return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
<commit_before>from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile) <commit_msg>Fix timing issue in signal<commit_after>
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return # The UserSocialAuth is created before the whole OAuth2 process is complete if 'access_token' not in instance.extra_data: return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile) Fix timing issue in signalfrom django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return # The UserSocialAuth is created before the whole OAuth2 process is complete if 'access_token' not in instance.extra_data: return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
<commit_before>from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile) <commit_msg>Fix timing issue in signal<commit_after>from django.db.models.signals import post_save from django.dispatch import receiver from data_import.signal_helpers import task_signal from social.apps.django_app.default.models import UserSocialAuth from .models import DataFile @receiver(post_save, sender=UserSocialAuth) def post_save_cb(sender, instance, created, raw, update_fields, **kwargs): """ Initiate retrieval of the data corresponding to a RunKeeper access token. """ if instance.provider != 'runkeeper': return # The UserSocialAuth is created before the whole OAuth2 process is complete if 'access_token' not in instance.extra_data: return task_params = { 'access_token': instance.extra_data['access_token'] } task_signal(instance, created, raw, task_params, DataFile)
e2d565bb283f2425005a93c0dd3abc9a422520d7
nodeconductor/iaas/cost_tracking.py
nodeconductor/iaas/cost_tracking.py
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: # prorata estimate calculation based on daily usage cost sd = invoice['start_date'] ed = invoice['end_date'] today = datetime.date.today() if not sd <= today <= ed: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue if sd.year == today.year and sd.month == today.month: days_in_month = calendar.monthrange(sd.year, sd.month)[1] days = sd.replace(day=days_in_month) - sd elif ed.year == today.year and ed.month == today.month: days = ed - ed.replace(day=1) daily_cost = invoice['amount'] / ((today - sd).days + 1) cost = daily_cost * (days.days + 1) yield instance, cost
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: today = datetime.date.today() if not invoice['start_date'] <= today <= invoice['end_date']: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue # prorata monthly cost estimate based on daily usage cost days_in_month = calendar.monthrange(today.year, today.month)[1] daily_cost = invoice['amount'] / ((today - invoice['start_date']).days + 1) cost = daily_cost * days_in_month yield instance, cost
Fix killbill cost estimate logic
Fix killbill cost estimate logic - NC-738
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: # prorata estimate calculation based on daily usage cost sd = invoice['start_date'] ed = invoice['end_date'] today = datetime.date.today() if not sd <= today <= ed: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue if sd.year == today.year and sd.month == today.month: days_in_month = calendar.monthrange(sd.year, sd.month)[1] days = sd.replace(day=days_in_month) - sd elif ed.year == today.year and ed.month == today.month: days = ed - ed.replace(day=1) daily_cost = invoice['amount'] / ((today - sd).days + 1) cost = daily_cost * (days.days + 1) yield instance, cost Fix killbill cost estimate logic - NC-738
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: today = datetime.date.today() if not invoice['start_date'] <= today <= invoice['end_date']: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue # prorata monthly cost estimate based on daily usage cost days_in_month = calendar.monthrange(today.year, today.month)[1] daily_cost = invoice['amount'] / ((today - invoice['start_date']).days + 1) cost = daily_cost * days_in_month yield instance, cost
<commit_before>import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: # prorata estimate calculation based on daily usage cost sd = invoice['start_date'] ed = invoice['end_date'] today = datetime.date.today() if not sd <= today <= ed: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue if sd.year == today.year and sd.month == today.month: days_in_month = calendar.monthrange(sd.year, sd.month)[1] days = sd.replace(day=days_in_month) - sd elif ed.year == today.year and ed.month == today.month: days = ed - ed.replace(day=1) daily_cost = invoice['amount'] / ((today - sd).days + 1) cost = daily_cost * (days.days + 1) yield instance, cost <commit_msg>Fix killbill cost estimate logic - NC-738<commit_after>
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: today = datetime.date.today() if not invoice['start_date'] <= today <= invoice['end_date']: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue # prorata monthly cost estimate based on daily usage cost days_in_month = calendar.monthrange(today.year, today.month)[1] daily_cost = invoice['amount'] / ((today - invoice['start_date']).days + 1) cost = daily_cost * days_in_month yield instance, cost
import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: # prorata estimate calculation based on daily usage cost sd = invoice['start_date'] ed = invoice['end_date'] today = datetime.date.today() if not sd <= today <= ed: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue if sd.year == today.year and sd.month == today.month: days_in_month = calendar.monthrange(sd.year, sd.month)[1] days = sd.replace(day=days_in_month) - sd elif ed.year == today.year and ed.month == today.month: days = ed - ed.replace(day=1) daily_cost = invoice['amount'] / ((today - sd).days + 1) cost = daily_cost * (days.days + 1) yield instance, cost Fix killbill cost estimate logic - NC-738import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: today = datetime.date.today() if not invoice['start_date'] <= today <= invoice['end_date']: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue # prorata monthly cost estimate based on daily usage cost days_in_month = calendar.monthrange(today.year, today.month)[1] daily_cost = invoice['amount'] / ((today - invoice['start_date']).days + 1) cost = daily_cost * days_in_month yield instance, cost
<commit_before>import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: # prorata estimate calculation based on daily usage cost sd = invoice['start_date'] ed = invoice['end_date'] today = datetime.date.today() if not sd <= today <= ed: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue if sd.year == today.year and sd.month == today.month: days_in_month = calendar.monthrange(sd.year, sd.month)[1] days = sd.replace(day=days_in_month) - sd elif ed.year == today.year and ed.month == today.month: days = ed - ed.replace(day=1) daily_cost = invoice['amount'] / ((today - sd).days + 1) cost = daily_cost * (days.days + 1) yield instance, cost <commit_msg>Fix killbill cost estimate logic - NC-738<commit_after>import logging import calendar import datetime from nodeconductor.cost_tracking import CostTrackingStrategy from nodeconductor.iaas.models import Instance from nodeconductor.structure import ServiceBackendError logger = logging.getLogger(__name__) class IaaSCostTracking(CostTrackingStrategy): @classmethod def get_costs_estimates(cls, customer=None): # TODO: move this logic to IaaS backend method 'get_cost_estimate' # and get rid from app dependent cost tracking together with entry points queryset = Instance.objects.exclude(billing_backend_id='') if customer: queryset = queryset.filter(customer=customer) for instance in queryset.iterator(): try: backend = instance.order.backend invoice = backend.get_invoice_estimate(instance) except ServiceBackendError as e: logger.error( "Failed to get price estimate for resource %s: %s", instance, e) else: today = datetime.date.today() if not invoice['start_date'] <= today <= invoice['end_date']: logger.error( "Wrong invoice estimate for resource %s: %s", instance, invoice) continue # prorata monthly cost estimate based on daily usage cost days_in_month = calendar.monthrange(today.year, today.month)[1] daily_cost = invoice['amount'] / ((today - invoice['start_date']).days + 1) cost = daily_cost * days_in_month yield instance, cost
8f2c8f6e9dec950e0ddd46f563b65f64424cadd1
erpnext/accounts/doctype/sales_invoice/sales_invoice_dashboard.py
erpnext/accounts/doctype/sales_invoice/sales_invoice_dashboard.py
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'], 'Delivery Note': ['items', 'delivery_note'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
Revert sales invoice dn link issue
Revert sales invoice dn link issue
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,Aptitudetech/ERPNext,geekroot/erpnext,indictranstech/erpnext,indictranstech/erpnext,indictranstech/erpnext,gsnbng/erpnext,gsnbng/erpnext,geekroot/erpnext,indictranstech/erpnext,geekroot/erpnext,geekroot/erpnext
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'], 'Delivery Note': ['items', 'delivery_note'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }Revert sales invoice dn link issue
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
<commit_before>from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'], 'Delivery Note': ['items', 'delivery_note'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }<commit_msg>Revert sales invoice dn link issue<commit_after>
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'], 'Delivery Note': ['items', 'delivery_note'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }Revert sales invoice dn link issuefrom frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
<commit_before>from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'], 'Delivery Note': ['items', 'delivery_note'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }<commit_msg>Revert sales invoice dn link issue<commit_after>from frappe import _ def get_data(): return { 'fieldname': 'sales_invoice', 'non_standard_fieldnames': { 'Delivery Note': 'against_sales_invoice', 'Journal Entry': 'reference_name', 'Payment Entry': 'reference_name', 'Payment Request': 'reference_name', 'Sales Invoice': 'return_against' }, 'internal_links': { 'Sales Order': ['items', 'sales_order'] }, 'transactions': [ { 'label': _('Payment'), 'items': ['Payment Entry', 'Payment Request', 'Journal Entry'] }, { 'label': _('Reference'), 'items': ['Timesheet', 'Delivery Note', 'Sales Order'] }, { 'label': _('Returns'), 'items': ['Sales Invoice'] }, ] }
93cefdc2c309ed0b81fe4ec7d49c0c8bead783a9
lib/path_utils.py
lib/path_utils.py
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__))
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
Use __file__ instead of __name__
Use __file__ instead of __name__
Python
bsd-3-clause
akeym/cyder,zeeman/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,OSU-Net/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder,OSU-Net/cyder,zeeman/cyder,murrown/cyder,drkitty/cyder,zeeman/cyder
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__)) Use __file__ instead of __name__
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
<commit_before>"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__)) <commit_msg>Use __file__ instead of __name__<commit_after>
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__)) Use __file__ instead of __name__"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
<commit_before>"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(abspath(__name__)) <commit_msg>Use __file__ instead of __name__<commit_after>"""Originally from funfactory (funfactory/path_utils.py) on a380a54""" import os from os.path import abspath, dirname def path(*a): return os.path.join(ROOT, *a) def import_mod_by_name(target): # stolen from mock :) components = target.split('.') import_path = components.pop(0) thing = __import__(import_path) for comp in components: import_path += ".%s" % comp thing = _dot_lookup(thing, comp, import_path) return thing def _dot_lookup(thing, comp, import_path): try: return getattr(thing, comp) except AttributeError: __import__(import_path) return getattr(thing, comp) ROOT = dirname(dirname(abspath(__file__)))
a6e3543650654a91abd0fa2af2064811debb47bd
ac_mediator/celery.py
ac_mediator/celery.py
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6, minute=0), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
Fix wrong 'renew_access_tokens' periodic task schedule
Fix wrong 'renew_access_tokens' periodic task schedule
Python
apache-2.0
AudioCommons/ac-mediator,AudioCommons/ac-mediator,AudioCommons/ac-mediator
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens') Fix wrong 'renew_access_tokens' periodic task schedule
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6, minute=0), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
<commit_before>from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens') <commit_msg>Fix wrong 'renew_access_tokens' periodic task schedule<commit_after>
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6, minute=0), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens') Fix wrong 'renew_access_tokens' periodic task schedulefrom __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6, minute=0), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
<commit_before>from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens') <commit_msg>Fix wrong 'renew_access_tokens' periodic task schedule<commit_after>from __future__ import absolute_import, unicode_literals import os from celery import Celery from django.core import management from celery.schedules import crontab # set the default Django settings module for the 'celery' program. os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ac_mediator.settings') app = Celery('ac_mediator') # Using a string here means the worker don't have to serialize # the configuration object to child processes. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix. app.config_from_object('django.conf:settings', namespace='CELERY') # Load task modules from all registered Django app configs. app.autodiscover_tasks() app.autodiscover_tasks('api', related_name='requests_distributor') @app.task(bind=True) def debug_task(self): print('Request: {0!r}'.format(self.request)) @app.task(bind=True) def run_django_management_command(self, command, *args, **kwargs): management.call_command(command, *args, **kwargs) # Configure periodic tasks here @app.on_after_configure.connect def setup_periodic_tasks(sender, **kwargs): sender.add_periodic_task( crontab(hour=6, minute=0), # Every day at 6 am run_django_management_command.s('renew_access_tokens'), name='Renew expired tokens')
be6ede95d37717a65bd02969e8340afd2354dcdc
tests/basics/gen_yield_from_throw.py
tests/basics/gen_yield_from_throw.py
def gen():
    try:
        yield 1
    except ValueError as e:
        print("got ValueError from upstream!", repr(e.args))
    yield "str1"
    raise TypeError


def gen2():
    print((yield from gen()))


g = gen2()
print(next(g))
print(g.throw(ValueError))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

# passing None as second argument to throw
g = gen2()
print(next(g))
print(g.throw(ValueError, None))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

# passing an exception instance as second argument to throw
g = gen2()
print(next(g))
print(g.throw(ValueError, ValueError(123)))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

def gen():
    try:
        yield 1
    except ValueError as e:
        print("got ValueError from upstream!", repr(e.args))
    yield "str1"
    raise TypeError


def gen2():
    print((yield from gen()))


g = gen2()
print(next(g))
print(g.throw(ValueError))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

# passing None as second argument to throw
g = gen2()
print(next(g))
print(g.throw(ValueError, None))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

# passing an exception instance as second argument to throw
g = gen2()
print(next(g))
print(g.throw(ValueError, ValueError(123)))
try:
    print(next(g))
except TypeError:
    print("got TypeError from downstream!")

# thrown value is caught and then generator returns normally
def gen():
    try:
        yield 123
    except ValueError:
        print('ValueError')


# return normally after catching thrown exception
def gen2():
    yield from gen()
    yield 789


g = gen2()
print(next(g))
print(g.throw(ValueError))
Add test for throw into yield-from with normal return.
tests/basics: Add test for throw into yield-from with normal return.

This test was found by missing coverage of a branch in py/nativeglue.c.
Python
mit
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
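What the added test pins down: a throw() on the outer generator is forwarded through yield from to the inner generator, and if the inner generator catches the exception and returns normally, the outer one simply resumes after its yield from expression. A rough walkthrough under standard CPython semantics (which the MicroPython test is checking against):

def inner():
    try:
        yield 123
    except ValueError:
        print('ValueError')   # catch the thrown exception, then fall off the end


def outer():
    yield from inner()        # throw() into outer is forwarded here
    yield 789                 # resumes once inner() returns normally


g = outer()
print(next(g))                # -> 123, yielded by inner()
print(g.throw(ValueError))    # prints 'ValueError', then -> 789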
648daddbc75ee18201cc441dcf3ec34238e4479d
astropy/coordinates/__init__.py
astropy/coordinates/__init__.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for celestial coordinates
of astronomical objects. It also contains a framework for conversions
between coordinate systems.
"""

from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *

# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
                         CartesianRepresentationFrameAttribute)

__doc__ += builtin_frames._transform_graph_docs + """

.. note::

    The ecliptic coordinate systems (added in Astropy v1.1) have not been
    extensively tested for accuracy or consistency with other implementations
    of ecliptic coordinates. We welcome contributions to add such testing,
    but in the meantime, users who depend on consistency with other
    implementations may wish to check test inputs against good datasets
    before using Astropy's ecliptic coordinates.
"""

# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This subpackage contains classes and functions for celestial coordinates
of astronomical objects. It also contains a framework for conversions
between coordinate systems.
"""

from .errors import *
from .angles import *
from .baseframe import *
from .attributes import *
from .distances import *
from .earth import *
from .transformations import *
from .builtin_frames import *
from .name_resolve import *
from .matching import *
from .representation import *
from .sky_coordinate import *
from .funcs import *
from .calculation import *
from .solar_system import *

# This is for backwards-compatibility -- can be removed in v3.0 when the
# deprecation warnings are removed
from .attributes import (TimeFrameAttribute, QuantityFrameAttribute,
                         CartesianRepresentationFrameAttribute)

__doc__ += builtin_frames._transform_graph_docs
Remove "experimental" state of ecliptic frames
Remove "experimental" state of ecliptic frames
Python
bsd-3-clause
lpsinger/astropy,saimn/astropy,dhomeier/astropy,saimn/astropy,astropy/astropy,MSeifert04/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,mhvk/astropy,lpsinger/astropy,pllim/astropy,saimn/astropy,astropy/astropy,MSeifert04/astropy,bsipocz/astropy,larrybradley/astropy,bsipocz/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,lpsinger/astropy,saimn/astropy,lpsinger/astropy,mhvk/astropy,dhomeier/astropy,bsipocz/astropy,aleksandr-bakanov/astropy,stargaser/astropy,astropy/astropy,StuartLittlefair/astropy,larrybradley/astropy,pllim/astropy,stargaser/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,larrybradley/astropy,astropy/astropy,mhvk/astropy,dhomeier/astropy,stargaser/astropy,mhvk/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,StuartLittlefair/astropy,pllim/astropy,pllim/astropy,StuartLittlefair/astropy,pllim/astropy,lpsinger/astropy
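The line that survives the commit, __doc__ += builtin_frames._transform_graph_docs, relies on a module docstring being an ordinary mutable attribute, so generated text appended at import time shows up in help() and in rendered documentation. A generic sketch of the pattern (the names below are illustrative, not astropy's):

"""Static part of a module docstring."""


def _build_generated_docs():
    # e.g. render a table or diagram description from runtime data
    return "\n\nGenerated section goes here."


__doc__ += _build_generated_docs()  # help() on the module now shows both parts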
7980c2e38d388ec78fd94ee13272934988dfa0f7
swingtime/__init__.py
swingtime/__init__.py
from django.apps import AppConfig

VERSION = (0, 3, 0, 'beta', 0)


def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version = '%s.%s' % (version, VERSION[2])
    if VERSION[3:] == ('alpha', 0):
        version = '%s pre-alpha' % version
    else:
        version = '%s %s' % (version, VERSION[3])
        if VERSION[3] != 'final':
            version = '%s %s' % (version, VERSION[4])
    return version


class SwingtimeAppConfig(AppConfig):
    name = 'swingtime'
    verbose_name = 'Calendar Configuration'


default_app_config = 'swingtime.SwingtimeAppConfig'

# -*- coding: utf-8 -*-
from django.apps import AppConfig

VERSION = (0, 3, 0, 'beta', 0)


def get_version():
    version = '%s.%s' % (VERSION[0], VERSION[1])
    if VERSION[2]:
        version = '%s.%s' % (version, VERSION[2])
    if VERSION[3:] == ('alpha', 0):
        version = '%s pre-alpha' % version
    else:
        version = '%s %s' % (version, VERSION[3])
        if VERSION[3] != 'final':
            version = '%s %s' % (version, VERSION[4])
    return version


class SwingtimeAppConfig(AppConfig):
    name = 'swingtime'
    _zero_width_space = u'\u200B'  # used to make it last alphabetically, better option: http://stackoverflow.com/questions/398163/ordering-admin-modeladmin-objects-in-django-admin
    verbose_name = _zero_width_space + 'Calendar Configuration'


default_app_config = 'swingtime.SwingtimeAppConfig'
Make calendar config show up at bottom of main admin
Make calendar config show up at bottom of main admin
Python
mit
thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee
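The zero-width-space hack works because the admin index sorts apps alphabetically by their display name, and U+200B compares greater than every ASCII letter, so the prefixed verbose_name sorts last while rendering exactly as before. A quick illustration (the other app names are made up):

app_names = ['Auth', 'Events', u'\u200B' + 'Calendar Configuration']
# ord(u'\u200b') == 8203 > ord('z') == 122, so the prefixed entry sorts last,
# and the zero-width space itself is invisible when displayed.
print(sorted(app_names))  # ['Auth', 'Events', '\u200bCalendar Configuration']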
993bab40e4df323c671c99eec63d366028818a36
rosie/chamber_of_deputies/tests/test_election_expenses_classifier.py
rosie/chamber_of_deputies/tests/test_election_expenses_classifier.py
from unittest import TestCase

import numpy as np
import pandas as pd

from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier


class TestElectionExpensesClassifier(TestCase):

    def setUp(self):
        self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv',
                                   dtype={'name': np.str, 'legal_entity': np.str})
        self.subject = ElectionExpensesClassifier()

    def test_is_election_company(self):
        self.assertEqual(self.subject.predict(self.dataset)[0], True)

    def test_is_not_election_company(self):
        self.assertEqual(self.subject.predict(self.dataset)[1], False)

    def test_fit(self):
        self.assertEqual(self.subject.fit(self.dataset), self.subject)

    def test_tranform(self):
        self.assertEqual(self.subject.transform(), self.subject)

from unittest import TestCase

import numpy as np
import pandas as pd

from rosie.chamber_of_deputies.classifiers import ElectionExpensesClassifier


class TestElectionExpensesClassifier(TestCase):

    def setUp(self):
        self.dataset = pd.read_csv('rosie/chamber_of_deputies/tests/fixtures/election_expenses_classifier.csv',
                                   dtype={'name': np.str, 'legal_entity': np.str})
        self.election_expenser_classifier = ElectionExpensesClassifier()

    def test_is_election_company(self):
        self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[0], True)

    def test_is_not_election_company(self):
        self.assertEqual(self.election_expenser_classifier.predict(self.dataset)[1], False)

    def test_fit(self):
        self.assertEqual(self.election_expenser_classifier.fit(self.dataset), self.election_expenser_classifier)

    def test_tranform(self):
        self.assertEqual(self.election_expenser_classifier.transform(), self.election_expenser_classifier)
Remove a Rails accent of using 'subject' in favor of the Zen of Python: explicit is better than implicit and readability counts
Remove a Rails accent of using 'subject' in favor of the Zen of Python: explicit is better than implicit and readability counts
Python
mit
marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor,marcusrehm/serenata-de-amor,datasciencebr/rosie,datasciencebr/serenata-de-amor
1b7767dbc4fbaf69a6bf83a3989d5e672e0c7488
django_countries/filters.py
django_countries/filters.py
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _


class CountryFilter(admin.SimpleListFilter):
    """
    A country filter for Django admin that only returns a list of countries
    related to the model.
    """
    title = _('Country')
    parameter_name = 'country'

    def lookups(self, request, model_admin):
        return set([
            (obj.country, obj.country.name)
            for obj in model_admin.model.objects.exclude(
                country__isnull=True
            ).exclude(country__exact='')
        ])

    def queryset(self, request, queryset):
        if self.value():
            return queryset.filter(country=self.value())
        else:
            return queryset

from django.contrib import admin
from django.utils.encoding import force_text
from django.utils.translation import ugettext as _


class CountryFilter(admin.FieldListFilter):
    """
    A country filter for Django admin that only returns a list of countries
    related to the model.
    """
    title = _('Country')

    def expected_parameters(self):
        return [self.field.name]

    def choices(self, changelist):
        yield {
            'selected': self.value() is None,
            'query_string': changelist.get_query_string(
                {}, [self.field.name]),
            'display': _('All'),
        }
        for lookup, title in self.lookup_choices(changelist):
            yield {
                'selected': self.value() == force_text(lookup),
                'query_string': changelist.get_query_string(
                    {self.field.name: lookup}, []),
                'display': title,
            }

    def lookup_choices(self, changelist):
        codes = changelist.queryset.values_list(self.field.name, flat=True)
        for k, v in self.field.get_choices(include_blank=False):
            if k in codes:
                yield k, v
Change the admin filter to a FieldListFilter
Change the admin filter to a FieldListFilter
Python
mit
schinckel/django-countries,SmileyChris/django-countries,pimlie/django-countries
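One consequence of this change: unlike a SimpleListFilter, which is listed bare in list_filter, a FieldListFilter is paired with the field it filters on, which is how the class above gets its self.field. A hypothetical registration (the Person model and its country field are made-up examples, not from the original project):

from django.contrib import admin

from django_countries.filters import CountryFilter
from .models import Person  # assumed example model with a `country` field


@admin.register(Person)
class PersonAdmin(admin.ModelAdmin):
    # field/filter pair: Django instantiates CountryFilter for `country`
    list_filter = [('country', CountryFilter)]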
78b45c06c8c52cf713de2dc1d7b1d3dc8e3c031e
test_addons/test_cases.py
test_addons/test_cases.py
# inbuilt python imports

# inbuilt django imports
from django.test import LiveServerTestCase

# third party imports

# inter-app imports

# local imports
import mixins
from mixins import SimpleTestCase


class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
    """ TestCase that creates a mongo collection and clears it after each test """
    pass


class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
    """ TestCase that runs liveserver using mongodb instead of relational database """
    pass


class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
    pass


class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
    pass


class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
    pass


class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
    pass


class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
    pass


class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
    pass


class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
    pass


class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
    pass


class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
    pass


class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
    pass

# inbuilt python imports

# inbuilt django imports
from django.test import LiveServerTestCase

# third party imports

# inter-app imports

# local imports
from . import mixins
from .mixins import SimpleTestCase


class MongoTestCase(mixins.MongoTestMixin, SimpleTestCase):
    """ TestCase that creates a mongo collection and clears it after each test """
    pass


class MongoLiveServerTestCase(mixins.MongoTestMixin, LiveServerTestCase):
    """ TestCase that runs liveserver using mongodb instead of relational database """
    pass


class Neo4jTestCase(mixins.Neo4jTestMixin, SimpleTestCase):
    pass


class MongoNeo4jTestCase(mixins.MongoNeo4jTestMixin, mixins.SimpleTestCase):
    pass


class RedisTestCase(mixins.RedisTestMixin, mixins.SimpleTestCase):
    pass


class MongoRedisTestCase(mixins.MongoRedisTestMixin, mixins.SimpleTestCase):
    pass


class RedisMongoNeo4jTestCase(mixins.RedisMongoNeo4jTestMixin, mixins.SimpleTestCase):
    pass


class APIRedisTestCase(mixins.ApiTestMixin, RedisTestCase):
    pass


class APIMongoTestCase(mixins.ApiTestMixin, MongoTestCase):
    pass


class APINeo4jTestCase(mixins.ApiTestMixin, Neo4jTestCase):
    pass


class APIMongoRedisTestCase(mixins.ApiTestMixin, MongoRedisTestCase):
    pass


class APIRedisMongoNeo4jTestCase(mixins.ApiTestMixin, RedisMongoNeo4jTestCase):
    pass
Use relative imports again to support python 3
Use relative imports again to support python 3
Python
mit
hspandher/django-test-addons
b992f8ca9d7e4269592dd4fe8129a7afe92634a7
libraries/exception.py
libraries/exception.py
class OkupyException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value)
from django.contrib.sites.models import Site class OkupyException(Exception): ''' Custom exception class ''' def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def log_extra_data(request = None, form = None): ''' Extra data needed by the custom formatter All values default to None ''' log_extra_data = { 'site_name': Site.objects.get_current().name or 'None', 'clientip':request.META.get('REMOTE_ADDR','None'), } if form: log_extra_data['username'] = form.data.get('username','None') else: if request.user.is_authenticated(): ''' Handle logged in users ''' log_extra_data['username'] = request.user.name else: ''' Handle anonymous users ''' log_extra_data['username'] = 'Anonymous' return log_extra_data
Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the lib
Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the lib
Python
agpl-3.0
dastergon/identity.gentoo.org,dastergon/identity.gentoo.org,gentoo/identity.gentoo.org,gentoo/identity.gentoo.org
class OkupyException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the lib
from django.contrib.sites.models import Site class OkupyException(Exception): ''' Custom exception class ''' def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def log_extra_data(request = None, form = None): ''' Extra data needed by the custom formatter All values default to None ''' log_extra_data = { 'site_name': Site.objects.get_current().name or 'None', 'clientip':request.META.get('REMOTE_ADDR','None'), } if form: log_extra_data['username'] = form.data.get('username','None') else: if request.user.is_authenticated(): ''' Handle logged in users ''' log_extra_data['username'] = request.user.name else: ''' Handle anonymous users ''' log_extra_data['username'] = 'Anonymous' return log_extra_data
<commit_before>class OkupyException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) <commit_msg>Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the lib<commit_after>
from django.contrib.sites.models import Site class OkupyException(Exception): ''' Custom exception class ''' def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def log_extra_data(request = None, form = None): ''' Extra data needed by the custom formatter All values default to None ''' log_extra_data = { 'site_name': Site.objects.get_current().name or 'None', 'clientip':request.META.get('REMOTE_ADDR','None'), } if form: log_extra_data['username'] = form.data.get('username','None') else: if request.user.is_authenticated(): ''' Handle logged in users ''' log_extra_data['username'] = request.user.name else: ''' Handle anonymous users ''' log_extra_data['username'] = 'Anonymous' return log_extra_data
class OkupyException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the libfrom django.contrib.sites.models import Site class OkupyException(Exception): ''' Custom exception class ''' def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def log_extra_data(request = None, form = None): ''' Extra data needed by the custom formatter All values default to None ''' log_extra_data = { 'site_name': Site.objects.get_current().name or 'None', 'clientip':request.META.get('REMOTE_ADDR','None'), } if form: log_extra_data['username'] = form.data.get('username','None') else: if request.user.is_authenticated(): ''' Handle logged in users ''' log_extra_data['username'] = request.user.name else: ''' Handle anonymous users ''' log_extra_data['username'] = 'Anonymous' return log_extra_data
<commit_before>class OkupyException(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) <commit_msg>Add a log_extra_data function, which returns a dictionary needed by the logging formatter Add comments in the lib<commit_after>from django.contrib.sites.models import Site class OkupyException(Exception): ''' Custom exception class ''' def __init__(self, value): self.value = value def __str__(self): return repr(self.value) def log_extra_data(request = None, form = None): ''' Extra data needed by the custom formatter All values default to None ''' log_extra_data = { 'site_name': Site.objects.get_current().name or 'None', 'clientip':request.META.get('REMOTE_ADDR','None'), } if form: log_extra_data['username'] = form.data.get('username','None') else: if request.user.is_authenticated(): ''' Handle logged in users ''' log_extra_data['username'] = request.user.name else: ''' Handle anonymous users ''' log_extra_data['username'] = 'Anonymous' return log_extra_data
06dd6fb991a9f68eea99d5943498688daa0b09c2
tests/test_interaction.py
tests/test_interaction.py
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1']
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] assert merged['join_index'].isin(choosers.index).all()
Return DCM probabilities as MultiIndexed Series
Return DCM probabilities as MultiIndexed Series Instead of separately returning probabilities and alternatives information this groups them all together. The probabilities have a MultiIndex with chooser IDs on the outside and alternative IDs on the inside.
Python
bsd-3-clause
UDST/choicemodels,UDST/choicemodels
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] Return DCM probabilities as MultiIndexed Series Instead of separately returning probabilities and alternatives information this groups them all together. The probabilities have a MultiIndex with chooser IDs on the outside and alternative IDs on the inside.
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] assert merged['join_index'].isin(choosers.index).all()
<commit_before>import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] <commit_msg>Return DCM probabilities as MultiIndexed Series Instead of separately returning probabilities and alternatives information this groups them all together. The probabilities have a MultiIndex with chooser IDs on the outside and alternative IDs on the inside.<commit_after>
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] assert merged['join_index'].isin(choosers.index).all()
import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] Return DCM probabilities as MultiIndexed Series Instead of separately returning probabilities and alternatives information this groups them all together. The probabilities have a MultiIndex with chooser IDs on the outside and alternative IDs on the inside.import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] assert merged['join_index'].isin(choosers.index).all()
<commit_before>import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] <commit_msg>Return DCM probabilities as MultiIndexed Series Instead of separately returning probabilities and alternatives information this groups them all together. The probabilities have a MultiIndex with chooser IDs on the outside and alternative IDs on the inside.<commit_after>import numpy.testing as npt import pandas as pd import pandas.util.testing as pdt import pytest from .. import interaction as inter @pytest.fixture def choosers(): return pd.DataFrame( {'var1': range(5, 10), 'thing_id': ['a', 'c', 'e', 'g', 'i']}) @pytest.fixture def alternatives(): return pd.DataFrame( {'var2': range(10, 20), 'var3': range(20, 30)}, index=pd.Index([x for x in 'abcdefghij'], name='thing_id')) def test_interaction_dataset_sim(choosers, alternatives): sample, merged, chosen = inter.mnl_interaction_dataset( choosers, alternatives, len(alternatives)) # chosen should be len(choosers) rows * len(alternatives) cols assert chosen.shape == (len(choosers), len(alternatives)) assert chosen[:, 0].sum() == len(choosers) assert chosen[:, 1:].sum() == 0 npt.assert_array_equal( sample, list(alternatives.index.values) * len(choosers)) assert len(merged) == len(choosers) * len(alternatives) npt.assert_array_equal(merged.index.values, sample) assert list(merged.columns) == [ 'var2', 'var3', 'join_index', 'thing_id', 'var1'] assert merged['join_index'].isin(choosers.index).all()
bf84fc3eb941d3d2e370fd95d42e4dd838515bfc
tests/unit/config_test.py
tests/unit/config_test.py
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
Add b option to string
Add b option to string
Python
mit
diegotoral/SaltShaker,diegotoral/SaltShaker
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename) Add b option to string
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
<commit_before># -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename) <commit_msg>Add b option to string<commit_after>
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename) Add b option to string# -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
<commit_before># -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', '') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename) <commit_msg>Add b option to string<commit_after># -*- coding: utf-8 -*- import os import unittest from testfixtures import TempDirectory from saltshaker.config import load_config from saltshaker.exceptions import ConfigurationError class TestConfig(unittest.TestCase): def setUp(self): self.tmp_dir = TempDirectory() def tearDown(self): self.tmp_dir.cleanup() def test_load_config_for_path_none(self): self.assertEqual(load_config(None), {}) def test_load_config_for_invalid_path(self): self.assertRaises(ConfigurationError, load_config, '/some/path') def test_load_config_for_valid_path(self): path = self._write_config_file('.shaker.yml', b'') self.assertEqual(load_config(path), None) def test_load_config_with_sources(self): path = self._write_config_file('.shaker.yml', b'sources:') self.assertEqual(load_config(path), { 'sources': None }) def _write_config_file(self, filename, filecontent): self.tmp_dir.write(filename, filecontent) return os.path.join(self.tmp_dir.path, filename)
d5a3b6e1eb37883a16c7e98d2a1b7c98d8d67051
layout/tests.py
layout/tests.py
from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home)
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
Add test for home page html content
Add test for home page html content
Python
mit
jvanbrug/scout,jvanbrug/scout
from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) Add test for home page html content
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
<commit_before>from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) <commit_msg>Add test for home page html content<commit_after>
from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) Add test for home page html contentfrom django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
<commit_before>from django.core.urlresolvers import resolve from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) <commit_msg>Add test for home page html content<commit_after>from django.core.urlresolvers import resolve from django.http import HttpRequest from django.template.loader import render_to_string from django.test import TestCase from layout.views import home class HomePageTest(TestCase): def test_root_url_resolves_to_home_page(self): found = resolve('/') self.assertEqual(found.func, home) def test_home_page_returns_correct_html(self): expected_html = render_to_string('home.html') request = HttpRequest() response = home(request) actual_html = response.content.decode() self.assertEqual(actual_html, expected_html)
9866b33ad5ba011c911346bae701973a5878f59e
blaze/compute/hdfstore.py
blaze/compute/hdfstore.py
from __future__ import absolute_import, division, print_function from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) from collections import namedtuple HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
from __future__ import absolute_import, division, print_function from collections import namedtuple from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
Move namedtuple to the top of the file
Move namedtuple to the top of the file
Python
bsd-3-clause
ContinuumIO/blaze,cowlicks/blaze,ChinaQuants/blaze,dwillmer/blaze,cpcloud/blaze,maxalbert/blaze,ContinuumIO/blaze,cowlicks/blaze,ChinaQuants/blaze,caseyclements/blaze,LiaoPan/blaze,xlhtc007/blaze,jcrist/blaze,dwillmer/blaze,nkhuyu/blaze,jcrist/blaze,LiaoPan/blaze,cpcloud/blaze,nkhuyu/blaze,maxalbert/blaze,caseyclements/blaze,alexmojaki/blaze,scls19fr/blaze,alexmojaki/blaze,scls19fr/blaze,jdmcbr/blaze,jdmcbr/blaze,xlhtc007/blaze
from __future__ import absolute_import, division, print_function from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) from collections import namedtuple HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key) Move namedtuple to the top of the file
from __future__ import absolute_import, division, print_function from collections import namedtuple from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
<commit_before>from __future__ import absolute_import, division, print_function from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) from collections import namedtuple HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key) <commit_msg>Move namedtuple to the top of the file<commit_after>
from __future__ import absolute_import, division, print_function from collections import namedtuple from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
from __future__ import absolute_import, division, print_function from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) from collections import namedtuple HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key) Move namedtuple to the top of the filefrom __future__ import absolute_import, division, print_function from collections import namedtuple from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
<commit_before>from __future__ import absolute_import, division, print_function from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) from collections import namedtuple HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key) <commit_msg>Move namedtuple to the top of the file<commit_after>from __future__ import absolute_import, division, print_function from collections import namedtuple from .core import pre_compute from ..expr import Expr, Field from ..dispatch import dispatch from odo import into, chunks import pandas as pd @dispatch(Expr, pd.io.pytables.AppendableFrameTable) def pre_compute(expr, data, **kwargs): return into(chunks(pd.DataFrame), data, **kwargs) @dispatch(Expr, pd.io.pytables.Fixed) def pre_compute(expr, data, **kwargs): return into(pd.DataFrame, data, **kwargs) @dispatch(Field, pd.HDFStore) def compute_up(expr, data, **kwargs): key = '/' + expr._name if key in data.keys(): return data.get_storer(key) else: return HDFGroup(data, key) HDFGroup = namedtuple('HDFGroup', 'parent,datapath') @dispatch(Field, HDFGroup) def compute_up(expr, data, **kwargs): key = data.datapath + '/' + expr._name if key in data.parent.keys(): return data.parent.get_storer(key) else: return HDFGroup(data.parent, key)
7ea7545cb0d9924fd0d7f3c658e2545a41ae11f0
arcutils/test/base.py
arcutils/test/base.py
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
Revert "Add patch_json method to our test Client"
Revert "Add patch_json method to our test Client" This reverts commit deef2f98deeeaf51bd9ddda4c5a200d082e16522. Somehow, I missed that this method was already there and added it again. I'm going to blame it on burnout.
Python
mit
wylee/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils,PSU-OIT-ARC/django-arcutils
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client Revert "Add patch_json method to our test Client" This reverts commit deef2f98deeeaf51bd9ddda4c5a200d082e16522. Somehow, I missed that this method was already there and added it again. I'm going to blame it on burnout.
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
<commit_before>import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client <commit_msg>Revert "Add patch_json method to our test Client" This reverts commit deef2f98deeeaf51bd9ddda4c5a200d082e16522. Somehow, I missed that this method was already there and added it again. I'm going to blame it on burnout.<commit_after>
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client Revert "Add patch_json method to our test Client" This reverts commit deef2f98deeeaf51bd9ddda4c5a200d082e16522. Somehow, I missed that this method was already there and added it again. I'm going to blame it on burnout.import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
<commit_before>import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client <commit_msg>Revert "Add patch_json method to our test Client" This reverts commit deef2f98deeeaf51bd9ddda4c5a200d082e16522. Somehow, I missed that this method was already there and added it again. I'm going to blame it on burnout.<commit_after>import json import django.test from .user import UserMixin class Client(django.test.Client): def patch_json(self, path, data=None, **kwargs): return self.patch(path, **self._json_kwargs(data, kwargs)) def post_json(self, path, data=None, **kwargs): return self.post(path, **self._json_kwargs(data, kwargs)) def put_json(self, path, data=None, **kwargs): return self.put(path, **self._json_kwargs(data, kwargs)) def _json_kwargs(self, data, kwargs): if data is not None: data = json.dumps(data) kwargs['data'] = data kwargs['content_type'] = 'application/json' return kwargs class FunctionalTestCase(django.test.TestCase, UserMixin): """Base class for view tests. It adds the following to Django's `TestCase`: - Convenient user creation & login - Convenient POSTs, PUTs, and PATCHes with a JSON body """ client_class = Client
98682412dccf2a5e38f0f701dbfe452e4e87a8aa
wagtail/admin/urls/password_reset.py
wagtail/admin/urls/password_reset.py
from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
Use new-style URL paths for password reset views
Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).
Python
bsd-3-clause
torchbox/wagtail,kaedroho/wagtail,thenewguy/wagtail,zerolab/wagtail,torchbox/wagtail,thenewguy/wagtail,gasman/wagtail,wagtail/wagtail,gasman/wagtail,FlipperPA/wagtail,takeflight/wagtail,takeflight/wagtail,rsalmaso/wagtail,jnns/wagtail,wagtail/wagtail,gasman/wagtail,mixxorz/wagtail,takeflight/wagtail,jnns/wagtail,rsalmaso/wagtail,zerolab/wagtail,jnns/wagtail,mixxorz/wagtail,kaedroho/wagtail,kaedroho/wagtail,zerolab/wagtail,thenewguy/wagtail,kaedroho/wagtail,rsalmaso/wagtail,zerolab/wagtail,FlipperPA/wagtail,jnns/wagtail,torchbox/wagtail,FlipperPA/wagtail,wagtail/wagtail,torchbox/wagtail,takeflight/wagtail,mixxorz/wagtail,zerolab/wagtail,rsalmaso/wagtail,FlipperPA/wagtail,mixxorz/wagtail,thenewguy/wagtail,gasman/wagtail,mixxorz/wagtail,wagtail/wagtail,kaedroho/wagtail,wagtail/wagtail,gasman/wagtail,thenewguy/wagtail,rsalmaso/wagtail
from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ] Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).
from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
<commit_before>from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ] <commit_msg>Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).<commit_after>
from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ] Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
<commit_before>from django.urls import path, re_path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), re_path( r'^confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ] <commit_msg>Use new-style URL paths for password reset views This matches what Django has done in the corresponding views: https://github.com/django/django/blob/5d4b9c1cab03f0d057f0c7751862df0302c65cf9/django/contrib/auth/urls.py and prevents it from breaking on Django 3.1 (because the token is now longer than the 13+20 chars allowed by the original regexp).<commit_after>from django.urls import path from wagtail.admin.views import account urlpatterns = [ path( '', account.PasswordResetView.as_view(), name='wagtailadmin_password_reset' ), path( 'done/', account.PasswordResetDoneView.as_view(), name='wagtailadmin_password_reset_done' ), path( 'confirm/<uidb64>/<token>/', account.PasswordResetConfirmView.as_view(), name='wagtailadmin_password_reset_confirm', ), path( 'complete/', account.PasswordResetCompleteView.as_view(), name='wagtailadmin_password_reset_complete' ), ]
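The Django 3.1 breakage described in the commit message can be reproduced with the old token regex alone. A stdlib-only sketch; the token strings below are made-up lengths, not real password-reset tokens:

import re

# Token fragment of the removed re_path pattern: at most 13 + 20 characters.
OLD_TOKEN = re.compile(r'^[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20}$')

short_token = 'abc-' + 'x' * 20   # 20-char hash part: accepted
long_token = 'abc-' + 'x' * 32    # longer hash part, as in newer Django: rejected

print(bool(OLD_TOKEN.match(short_token)))  # True
print(bool(OLD_TOKEN.match(long_token)))   # False -> the reset link would 404

The untyped <uidb64>/<token> converters in the new-style path() accept any non-slash segment, so the length limit disappears.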
b7c967ad0f45cc1144a8713c6513bae5bca89242
LiSE/LiSE/test_proxy.py
LiSE/LiSE/test_proxy.py
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
Delete BranchLineageTest, which assumes bidirectional graphs exist
Delete BranchLineageTest, which assumes bidirectional graphs exist
Python
agpl-3.0
LogicalDash/LiSE,LogicalDash/LiSE
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass Delete BranchLineageTest, which assumes bidirectional graphs exist
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
<commit_before>from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass <commit_msg>Delete BranchLineageTest, which assumes bidirectional graphs exist<commit_after>
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass Delete BranchLineageTest, which assumes bidirectional graphs existfrom LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
<commit_before>from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class BranchLineageTest(ProxyGraphTest, allegedb.test.AbstractBranchLineageTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass <commit_msg>Delete BranchLineageTest, which assumes bidirectional graphs exist<commit_after>from LiSE.proxy import EngineProcessManager import allegedb.test class ProxyTest(allegedb.test.AllegedTest): def setUp(self): self.manager = EngineProcessManager() self.engine = self.manager.start('sqlite:///:memory:') self.graphmakers = (self.engine.new_character,) def tearDown(self): self.manager.shutdown() class ProxyGraphTest(allegedb.test.AbstractGraphTest, ProxyTest): pass class DictStorageTest(ProxyTest, allegedb.test.DictStorageTest): pass class ListStorageTest(ProxyTest, allegedb.test.ListStorageTest): pass class SetStorageTest(ProxyTest, allegedb.test.SetStorageTest): pass
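The deleted class followed the mixin layout used throughout this test module: abstract test bodies live in a class that is not itself a TestCase, and a concrete class combines them with a backend fixture, so removing the concrete class silently drops that whole group of tests. A minimal sketch of the pattern with invented names:

import unittest

class BackendFixture:
    """Stand-in for ProxyTest: supplies setUp, no assertions of its own."""
    def setUp(self):
        self.store = {}

class AbstractStorageTest:
    """Stand-in for the allegedb abstract tests; not collected by itself."""
    def test_roundtrip(self):
        self.store['k'] = 'v'
        self.assertEqual(self.store['k'], 'v')

class StorageTest(BackendFixture, AbstractStorageTest, unittest.TestCase):
    pass  # deleting this class removes the suite, as with BranchLineageTest

if __name__ == '__main__':
    unittest.main()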
3b9f6e41014859eecc9d6ef01ec10fed40775861
medical_prescription_us/tests/test_medical_prescription_order_line.py
medical_prescription_us/tests/test_medical_prescription_order_line.py
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp.tests.common import TransactionCase from openerp.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo.tests.common import TransactionCase from odoo.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
Upgrade test namespace * Change openerp namespace to odoo in test imports
[MIG] medical_prescription_us: Upgrade test namespace * Change openerp namespace to odoo in test imports
Python
agpl-3.0
laslabs/vertical-medical,laslabs/vertical-medical
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp.tests.common import TransactionCase from openerp.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1 [MIG] medical_prescription_us: Upgrade test namespace * Change openerp namespace to odoo in test imports
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo.tests.common import TransactionCase from odoo.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
<commit_before># -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp.tests.common import TransactionCase from openerp.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1 <commit_msg>[MIG] medical_prescription_us: Upgrade test namespace * Change openerp namespace to odoo in test imports<commit_after>
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo.tests.common import TransactionCase from odoo.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp.tests.common import TransactionCase from openerp.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1 [MIG] medical_prescription_us: Upgrade test namespace * Change openerp namespace to odoo in test imports# -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo.tests.common import TransactionCase from odoo.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
<commit_before># -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from openerp.tests.common import TransactionCase from openerp.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1 <commit_msg>[MIG] medical_prescription_us: Upgrade test namespace * Change openerp namespace to odoo in test imports<commit_after># -*- coding: utf-8 -*- # Copyright 2016 LasLabs Inc. # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). from odoo.tests.common import TransactionCase from odoo.exceptions import ValidationError class TestMedicalPrescriptionOrderLine(TransactionCase): def setUp(self): super(TestMedicalPrescriptionOrderLine, self).setUp() self.order_line_1 = self.env.ref( 'medical_prescription.' + 'medical_prescription_order_line_patient_1_order_1_line_1' ) def test_check_refill_qty_original(self): """ Test refill_qty_original cannot be less than 0 """ with self.assertRaises(ValidationError): self.order_line_1.refill_qty_original = -1
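Namespace migrations like this one are mechanical, so they are often scripted. A stdlib-only sketch of automating the rename across a module's tests; the regex, glob pattern, and in-place rewrite are assumptions for illustration, not part of the original commit:

import pathlib
import re

def migrate_namespace(path):
    """Rewrite 'from openerp ...' / 'import openerp ...' to the odoo namespace."""
    src = path.read_text()
    out = re.sub(r'\b(from|import)(\s+)openerp\b', r'\1\2odoo', src)
    if out != src:
        path.write_text(out)

if __name__ == '__main__':
    for test_file in pathlib.Path('.').rglob('tests/test_*.py'):
        migrate_namespace(test_file)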
d5a1bfbff18cf129550c2c423beb8db9302c0736
tests/redisdl_test.py
tests/redisdl_test.py
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def setUp(self): import redis self.r = redis.Redis() for key in self.r.keys('*'): self.r.delete(key) def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
Clear redis data store before running tests
Clear redis data store before running tests
Python
bsd-2-clause
hyunchel/redis-dump-load,p/redis-dump-load,hyunchel/redis-dump-load,p/redis-dump-load
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual) Clear redis data store before running tests
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def setUp(self): import redis self.r = redis.Redis() for key in self.r.keys('*'): self.r.delete(key) def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
<commit_before>import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual) <commit_msg>Clear redis data store before running tests<commit_after>
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def setUp(self): import redis self.r = redis.Redis() for key in self.r.keys('*'): self.r.delete(key) def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual) Clear redis data store before running testsimport redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def setUp(self): import redis self.r = redis.Redis() for key in self.r.keys('*'): self.r.delete(key) def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
<commit_before>import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual) <commit_msg>Clear redis data store before running tests<commit_after>import redisdl import unittest import json import os.path class RedisdlTest(unittest.TestCase): def setUp(self): import redis self.r = redis.Redis() for key in self.r.keys('*'): self.r.delete(key) def test_roundtrip(self): path = os.path.join(os.path.dirname(__file__), 'fixtures', 'dump.json') with open(path) as f: dump = f.read() redisdl.loads(dump) redump = redisdl.dumps() expected = json.loads(dump) actual = json.loads(redump) self.assertEqual(expected, actual)
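The setUp above clears state by listing keys and deleting them one at a time. An alternative sketch, assuming the tests can be pointed at a dedicated database index and that a local Redis server is running, is a single FLUSHDB call; the URL and DB number 15 here are assumptions, not part of the original test:

import redis

def clear_test_db(url='redis://localhost:6379/15'):
    # FLUSHDB wipes only the selected database index, so data in other
    # indexes (e.g. a developer's DB 0) is untouched.
    r = redis.Redis.from_url(url)
    r.flushdb()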
0549a85f83bb4fb95aff3c4fc8d8a699c7e73fa9
chainer/utils/argument.py
chainer/utils/argument.py
def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: import inspect caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
import inspect def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
Put `import inspect` at the top of the file
Put `import inspect` at the top of the file
Python
mit
keisuke-umezawa/chainer,niboshi/chainer,wkentaro/chainer,chainer/chainer,rezoo/chainer,keisuke-umezawa/chainer,hvy/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,jnishi/chainer,niboshi/chainer,pfnet/chainer,keisuke-umezawa/chainer,anaruse/chainer,hvy/chainer,okuta/chainer,chainer/chainer,hvy/chainer,niboshi/chainer,jnishi/chainer,ktnyt/chainer,tkerola/chainer,chainer/chainer,jnishi/chainer,okuta/chainer,jnishi/chainer,ronekko/chainer,wkentaro/chainer,ktnyt/chainer,chainer/chainer,hvy/chainer,wkentaro/chainer,niboshi/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer
def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: import inspect caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs) Put `import inspect` at the top of the file
import inspect def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
<commit_before>def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: import inspect caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs) <commit_msg>Put `import inspect` at the top of the file<commit_after>
import inspect def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: import inspect caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs) Put `import inspect` at the top of the fileimport inspect def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
<commit_before>def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: import inspect caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs) <commit_msg>Put `import inspect` at the top of the file<commit_after>import inspect def check_unexpected_kwargs(kwargs, **unexpected): for key, message in unexpected.items(): if key in kwargs: raise ValueError(message) def parse_kwargs(kwargs, *name_and_values): values = [kwargs.pop(name, default_value) for name, default_value in name_and_values] if kwargs: caller = inspect.stack()[1] args = ', '.join(repr(arg) for arg in sorted(kwargs.keys())) message = caller[3] + \ '() got unexpected keyword argument(s) {}'.format(args) raise TypeError(message) return tuple(values) def assert_kwargs_empty(kwargs): # It only checks if kwargs is empty. parse_kwargs(kwargs)
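The reason parse_kwargs reaches for inspect.stack()[1] is that the TypeError should name the public function the user called, not the helper itself. A usage sketch; the convolve wrapper below is invented for illustration:

import inspect

def parse_kwargs(kwargs, *name_and_values):
    values = [kwargs.pop(name, default) for name, default in name_and_values]
    if kwargs:
        caller = inspect.stack()[1]
        args = ', '.join(repr(a) for a in sorted(kwargs))
        raise TypeError(caller[3] + '() got unexpected keyword argument(s) ' + args)
    return tuple(values)

def convolve(x, **kwargs):
    stride, pad = parse_kwargs(kwargs, ('stride', 1), ('pad', 0))
    return stride, pad

print(convolve(None, stride=2))  # (2, 0)
# convolve(None, strde=2) raises:
#   TypeError: convolve() got unexpected keyword argument(s) 'strde'

Hoisting import inspect to module level, as the commit does, is chiefly a style fix (PEP 8 puts imports at the top); repeated in-function imports would have hit the sys.modules cache anyway.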
50072e2e2fa2f650dd1899b14aaaecb2dfe909ef
tests/test_plugins.py
tests/test_plugins.py
# -*- coding:utf-8 -*- import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
# -*- coding:utf-8 -*- import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
Clear signals after testing plugins
Clear signals after testing plugins
Python
mit
xouillet/sigal,t-animal/sigal,xouillet/sigal,saimn/sigal,xouillet/sigal,jasuarez/sigal,t-animal/sigal,t-animal/sigal,jasuarez/sigal,saimn/sigal,jasuarez/sigal,saimn/sigal
# -*- coding:utf-8 -*- import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file Clear signals after testing plugins
# -*- coding:utf-8 -*- import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
<commit_before># -*- coding:utf-8 -*- import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file <commit_msg>Clear signals after testing plugins<commit_after>
# -*- coding:utf-8 -*- import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
# -*- coding:utf-8 -*- import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file Clear signals after testing plugins# -*- coding:utf-8 -*- import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
<commit_before># -*- coding:utf-8 -*- import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file <commit_msg>Clear signals after testing plugins<commit_after># -*- coding:utf-8 -*- import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
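The finally block above exists because blinker signals are module-level singletons: receivers connected during one test survive into the next unless explicitly cleared. A minimal sketch of the lingering-receiver problem (requires the blinker package; the signal and handler names are invented):

import blinker

on_build = blinker.Signal()

def receiver(sender):
    print('building for', sender)

on_build.connect(receiver)
on_build.send('test-1')      # building for test-1

on_build.receivers.clear()   # the same reset the test's finally block performs
on_build.send('test-2')      # nothing printed: no receivers remain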
2a3e0b57f8f18404d5949abfd14cea8f51bfad13
tests/test_queries.py
tests/test_queries.py
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_statement_response_list_contains(self): query = self.query.statement_response_list_contains('Hey') self.assertIn('in_response_to', query.value()) self.assertIn('$elemMatch', query.value()['in_response_to']) self.assertIn('text', query.value()['in_response_to']['$elemMatch']) self.assertEqual('Hey', query.value()['in_response_to']['$elemMatch']['text']) def test_statement_response_list_equals(self): query = self.query.statement_response_list_equals([]) self.assertIn('in_response_to', query.value()) self.assertEqual(query.value()['in_response_to'], []) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
Remove test cases for query methods being removed.
Remove test cases for query methods being removed.
Python
bsd-3-clause
gunthercox/ChatterBot,vkosuri/ChatterBot
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_statement_response_list_contains(self): query = self.query.statement_response_list_contains('Hey') self.assertIn('in_response_to', query.value()) self.assertIn('$elemMatch', query.value()['in_response_to']) self.assertIn('text', query.value()['in_response_to']['$elemMatch']) self.assertEqual('Hey', query.value()['in_response_to']['$elemMatch']['text']) def test_statement_response_list_equals(self): query = self.query.statement_response_list_equals([]) self.assertIn('in_response_to', query.value()) self.assertEqual(query.value()['in_response_to'], []) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing') Remove test cases for query methods being removed.
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
<commit_before>from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_statement_response_list_contains(self): query = self.query.statement_response_list_contains('Hey') self.assertIn('in_response_to', query.value()) self.assertIn('$elemMatch', query.value()['in_response_to']) self.assertIn('text', query.value()['in_response_to']['$elemMatch']) self.assertEqual('Hey', query.value()['in_response_to']['$elemMatch']['text']) def test_statement_response_list_equals(self): query = self.query.statement_response_list_equals([]) self.assertIn('in_response_to', query.value()) self.assertEqual(query.value()['in_response_to'], []) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing') <commit_msg>Remove test cases for query methods being removed.<commit_after>
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_statement_response_list_contains(self): query = self.query.statement_response_list_contains('Hey') self.assertIn('in_response_to', query.value()) self.assertIn('$elemMatch', query.value()['in_response_to']) self.assertIn('text', query.value()['in_response_to']['$elemMatch']) self.assertEqual('Hey', query.value()['in_response_to']['$elemMatch']['text']) def test_statement_response_list_equals(self): query = self.query.statement_response_list_equals([]) self.assertIn('in_response_to', query.value()) self.assertEqual(query.value()['in_response_to'], []) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing') Remove test cases for query methods being removed.from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
<commit_before>from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_statement_response_list_contains(self): query = self.query.statement_response_list_contains('Hey') self.assertIn('in_response_to', query.value()) self.assertIn('$elemMatch', query.value()['in_response_to']) self.assertIn('text', query.value()['in_response_to']['$elemMatch']) self.assertEqual('Hey', query.value()['in_response_to']['$elemMatch']['text']) def test_statement_response_list_equals(self): query = self.query.statement_response_list_equals([]) self.assertIn('in_response_to', query.value()) self.assertEqual(query.value()['in_response_to'], []) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing') <commit_msg>Remove test cases for query methods being removed.<commit_after>from unittest import TestCase class MongoAdapterTestCase(TestCase): def setUp(self): from chatterbot.storage.mongodb import Query self.query = Query() def test_statement_text_equals(self): query = self.query.statement_text_equals('Testing in progress') self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'Testing in progress') def test_statement_text_not_in(self): query = self.query.statement_text_not_in(['One', 'Two']) self.assertIn('text', query.value()) self.assertIn('$nin', query.value()['text']) self.assertIn('One', query.value()['text']['$nin']) self.assertIn('Two', query.value()['text']['$nin']) def test_raw(self): query = self.query.raw({'text': 'testing'}) self.assertIn('text', query.value()) self.assertEqual(query.value()['text'], 'testing')
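For reference, the filters that the removed helper methods produced are plain dictionaries and can be passed straight to pymongo; the field names come from the deleted assertions above, while the collection handle is an assumption:

# Equivalent raw filters for the removed helpers:
response_list_contains = {'in_response_to': {'$elemMatch': {'text': 'Hey'}}}
response_list_equals = {'in_response_to': []}

# With a pymongo collection named `statements` (assumed):
#     statements.find(response_list_contains)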
f10d2865a8f858f05e72709655d15923ea706bb3
unitTestUtils/parseXML.py
unitTestUtils/parseXML.py
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import print_function from xml.etree.ElementTree import ParseError import xml.etree.ElementTree as ET import glob import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def parse(): for infile in glob.glob('*.xml'): try: tree = ET.parse(infile) root = tree.getroot() if root.findall('.//FatalError'): eprint("Error detected") print(infile) sys.exit(1) except ParseError: eprint("The file xml isn't correct. There were some mistakes in the tests ") sys.exit(1) def main(): parse() if __name__ == '__main__': main()
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import print_function from xml.etree.ElementTree import ParseError import xml.etree.ElementTree as ET import glob import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def parse(): for infile in glob.glob('*.xml'): try: tree = ET.parse(infile) root = tree.getroot() if root.findall('.//FatalError'): element=root.findall('.//FatalError')[0] eprint("Error detected") print(infile) print(element.text) sys.exit(1) except ParseError: eprint("The file xml isn't correct. There were some mistakes in the tests ") sys.exit(1) def main(): parse() if __name__ == '__main__': main()
Add more verbose error to reporte on Travis parserXML.py
Add more verbose error to reporte on Travis parserXML.py
Python
apache-2.0
alexkernphysiker/j-pet-framework,alexkernphysiker/j-pet-framework,alexkernphysiker/j-pet-framework,JPETTomography/j-pet-framework,JPETTomography/j-pet-framework,alexkernphysiker/j-pet-framework,JPETTomography/j-pet-framework,alexkernphysiker/j-pet-framework
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import print_function from xml.etree.ElementTree import ParseError import xml.etree.ElementTree as ET import glob import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def parse(): for infile in glob.glob('*.xml'): try: tree = ET.parse(infile) root = tree.getroot() if root.findall('.//FatalError'): eprint("Error detected") print(infile) sys.exit(1) except ParseError: eprint("The file xml isn't correct. There were some mistakes in the tests ") sys.exit(1) def main(): parse() if __name__ == '__main__': main() Add more verbose error to reporte on Travis parserXML.py
#!/usr/bin/python # -*- coding: utf-8 -*- from __future__ import print_function from xml.etree.ElementTree import ParseError import xml.etree.ElementTree as ET import glob import sys def eprint(*args, **kwargs): print(*args, file=sys.stderr, **kwargs) def parse(): for infile in glob.glob('*.xml'): try: tree = ET.parse(infile) root = tree.getroot() if root.findall('.//FatalError'): element=root.findall('.//FatalError')[0] eprint("Error detected") print(infile) print(element.text) sys.exit(1) except ParseError: eprint("The file xml isn't correct. There were some mistakes in the tests ") sys.exit(1) def main(): parse() if __name__ == '__main__': main()
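A minimal sketch of the ElementTree lookup this commit relies on, using a hypothetical report snippet (the tag layout is assumed, not taken from the real Travis XML):

import xml.etree.ElementTree as ET

# Hypothetical report body; the real CI XML will differ.
REPORT = "<Site><FatalError>Segmentation fault in testFoo</FatalError></Site>"

root = ET.fromstring(REPORT)
matches = root.findall('.//FatalError')   # every FatalError descendant
if matches:
    # The commit prints matches[0].text so the CI log shows the
    # failure message itself, not just the file name.
    print(matches[0].text)                # Segmentation fault in testFoo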
47ce2347b05bb13af6c6430c24341e5892b22ffa
spacy/tests/regression/test_issue1506.py
spacy/tests/regression/test_issue1506.py
# coding: utf8 from __future__ import unicode_literals import gc from ...lang.en import English def test_issue1506(): nlp = English() def string_generator(): for _ in range(10001): yield "It's sentence produced by that bug." yield "Oh snap." for _ in range(10001): yield "I erase lemmas." for _ in range(10001): yield "It's sentence produced by that bug." for _ in range(10001): yield "It's sentence produced by that bug." anchor = None remember = None for i, d in enumerate(nlp.pipe(string_generator())): if i == 9999: anchor = d elif 10001 == i: remember = d elif i == 10002: del anchor gc.collect() for t in d: str(t.lemma_) assert remember.text == 'Oh snap.'
# coding: utf8 from __future__ import unicode_literals import gc from ...lang.en import English def test_issue1506(): nlp = English() def string_generator(): for _ in range(10001): yield "It's sentence produced by that bug." yield "Oh snap." for _ in range(10001): yield "I erase lemmas." for _ in range(10001): yield "It's sentence produced by that bug." for _ in range(10001): yield "It's sentence produced by that bug." anchor = None remember = None for i, d in enumerate(nlp.pipe(string_generator())): if i == 9999: anchor = d elif 10001 == i: remember = d elif i == 10002: del anchor gc.collect() # We should run cleanup more than one time to actually cleanup data. # In first run — clean up only mark strings as «not hitted». if i == 20000 or i == 30000: gc.collect() for t in d: str(t.lemma_) assert remember.text == 'Oh snap.'
Create test that fails when actual cleanup caused
Create test that fails when actual cleanup caused
Python
mit
explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,explosion/spaCy,honnibal/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,aikramer2/spaCy
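The extra gc.collect() calls at i == 20000 and i == 30000 matter because the cleanup under test is two-phase: a first pass only marks strings as unused, and a later pass frees them. A rough sketch of that mark-then-sweep idea (an illustration of the pattern only, not spaCy's actual StringStore code):

class MarkSweepCache(object):
    def __init__(self):
        self._entries = {}            # key -> [value, recently_used]

    def add(self, key, value):
        self._entries[key] = [value, True]

    def get(self, key):
        entry = self._entries[key]
        entry[1] = True               # mark as used
        return entry[0]

    def cleanup(self):
        # Pass 1 demotes an entry to "not used"; only a second
        # cleanup() with no intervening get() actually evicts it.
        for key in list(self._entries):
            if self._entries[key][1]:
                self._entries[key][1] = False
            else:
                del self._entries[key]

A test that runs cleanup() once can never observe an eviction under this scheme, which is why the fixed test collects more than one time.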
7a070f0b7873d96b2cb5e4c54fb34b3c38d45afb
genome_designer/main/data_util.py
genome_designer/main/data_util.py
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView(variant, variant_id_to_metadata_dict) for variant in variant_list]
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView.variant_as_cast_view(variant, variant_id_to_metadata_dict) for variant in variant_list]
Fix bug from last commit.
Fix bug from last commit.
Python
mit
churchlab/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone_accidental_source,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView(variant, variant_id_to_metadata_dict) for variant in variant_list] Fix bug from last commit.
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView.variant_as_cast_view(variant, variant_id_to_metadata_dict) for variant in variant_list]
<commit_before>""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView(variant, variant_id_to_metadata_dict) for variant in variant_list] <commit_msg>Fix bug from last commit.<commit_after>
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView.variant_as_cast_view(variant, variant_id_to_metadata_dict) for variant in variant_list]
""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView(variant, variant_id_to_metadata_dict) for variant in variant_list] Fix bug from last commit.""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView.variant_as_cast_view(variant, variant_id_to_metadata_dict) for variant in variant_list]
<commit_before>""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView(variant, variant_id_to_metadata_dict) for variant in variant_list] <commit_msg>Fix bug from last commit.<commit_after>""" Common methods for getting data from the backend. These methods are intended to be used by both views.py, which should define only pages, and xhr_handlers.py, which are intended to respond to AJAX requests. """ from main.model_views import CastVariantView from main.model_views import MeltedVariantView from variants.variant_filter import get_variants_that_pass_filter def lookup_variants(reference_genome, combined_filter_string, is_melted): """Lookup the Variants that match the filter specified in the params. Returns: List of CastVariantView or MeltedVariantView objects. """ # Apply the filters. filter_result = get_variants_that_pass_filter( combined_filter_string, reference_genome) variant_list = filter_result.variant_set variant_id_to_metadata_dict = filter_result.variant_id_to_metadata_dict # Convert to appropriate view objects. if is_melted: melted_variant_list = [] for variant in variant_list: melted_variant_list.extend( MeltedVariantView.variant_as_melted_list(variant, variant_id_to_metadata_dict)) return melted_variant_list else: return [CastVariantView.variant_as_cast_view(variant, variant_id_to_metadata_dict) for variant in variant_list]
e6c43333c3939247534ddee4c419dcdcff5eda5f
spyder_terminal/server/rest/term_rest.py
spyder_terminal/server/rest/term_rest.py
# -*- coding: iso-8859-15 -*- """Main HTTP routes request handlers.""" import tornado.web import tornado.escape from os import getcwd class MainHandler(tornado.web.RequestHandler): """Handles creation of new terminals.""" @tornado.gen.coroutine def post(self): """POST verb: Create a new terminal.""" rows = int(self.get_argument('rows', default=23)) cols = int(self.get_argument('cols', default=73)) cwd = self.get_cookie('cwd', default=getcwd()) self.application.logger.info('CWD: {0}'.format(cwd)) self.application.logger.info('Size: ({0}, {1})'.format(cols, rows)) pid = yield self.application.term_manager.create_term(rows, cols, cwd) self.write(pid) class ResizeHandler(tornado.web.RequestHandler): """Handles resizing of terminals.""" @tornado.gen.coroutine def post(self, pid): """POST verb: Resize a terminal.""" rows = int(self.get_argument('rows', None, 23)) cols = int(self.get_argument('cols', None, 73)) self.application.term_manager.resize_term(pid, rows, cols)
# -*- coding: iso-8859-15 -*- """Main HTTP routes request handlers.""" import tornado.web import tornado.escape from os import getcwd class MainHandler(tornado.web.RequestHandler): """Handles creation of new terminals.""" @tornado.gen.coroutine def post(self): """POST verb: Create a new terminal.""" rows = int(self.get_argument('rows', default=23)) cols = int(self.get_argument('cols', default=73)) cwd = self.get_cookie('cwd', default=getcwd()) self.application.logger.info('CWD: {0}'.format(cwd)) self.application.logger.info('Size: ({0}, {1})'.format(cols, rows)) pid = yield self.application.term_manager.create_term(rows, cols, cwd) self.write(pid) class ResizeHandler(tornado.web.RequestHandler): """Handles resizing of terminals.""" @tornado.gen.coroutine def post(self, pid): """POST verb: Resize a terminal.""" rows = int(self.get_argument('rows', default=23)) cols = int(self.get_argument('cols', default=73)) self.application.term_manager.resize_term(pid, rows, cols)
Change default terminal resize arguments
Change default terminal resize arguments
Python
mit
andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
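Tornado's RequestHandler.get_argument signature is (name, default, strip), so the old positional call get_argument('rows', None, 23) set default=None and strip=23; a missing argument then reached int(None) and raised TypeError. A simplified stand-in that reproduces the behaviour (not Tornado itself):

_MISSING = object()

def get_argument(arguments, name, default=_MISSING, strip=True):
    # Simplified stand-in for tornado.web.RequestHandler.get_argument.
    if name not in arguments:
        if default is _MISSING:
            raise KeyError(name)
        return default
    value = arguments[name]
    return value.strip() if strip else value

print(int(get_argument({}, 'rows', default=23)))   # 23, the fixed form
broken = get_argument({}, 'rows', None, 23)        # default=None, strip=23
print(broken)                                      # None; int(None) would raise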
00ef4db967b00c5cef5c72d5266327bbd9db5909
ibmcnx/test/loadFunction.py
ibmcnx/test/loadFunction.py
import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() def loadFilesService(): global globdict exec open("filesAdmin.py").read()
import sys from java.lang import String from java.util import HashSet from java.util import HashMap import java import lotusConnectionsCommonAdmin globdict = globals() locdict = locals() def loadFilesService(): global globdict global locdict execfile("filesAdmin.py",globdict,locdict)
Customize scripts to work with menu
Customize scripts to work with menu
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
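This script runs under wsadmin's Jython (Python 2), where execfile(path, globals, locals) executes a script inside an explicit namespace; names defined by filesAdmin.py then stay visible after loadFilesService() returns, which exec open(...).read() inside a function does not guarantee. A Python 2 sketch using a throwaway script (demo_admin.py stands in for filesAdmin.py):

# Python 2 / Jython
with open("demo_admin.py", "w") as fh:
    fh.write("def list_fields():\n    return ['id', 'title']\n")

namespace = globals()

def load_service():
    # Executing into the module-level dict keeps list_fields()
    # callable once this function has returned.
    execfile("demo_admin.py", namespace, namespace)

load_service()
print(list_fields())   # ['id', 'title']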
2e9472e4989985ebdb770c193856a02616a3d8e4
plugoo/assets.py
plugoo/assets.py
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: return line.replace('\n','') else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
class Asset: """ This is an ooni-probe asset. It is a python iterator object, allowing it to be efficiently looped. To create your own custom asset your should subclass this and override the next_asset method and the len method for computing the length of the asset. """ def __init__(self, file=None, *args, **argv): self.fh = None if file: self.name = file self.fh = open(file, 'r') self.eof = False def __iter__(self): return self def len(self): """ Returns the length of the asset """ for i, l in enumerate(self.fh): pass # rewind the file self.fh.seek(0) return i + 1 def parse_line(self, line): """ Override this method if you need line by line parsing of an Asset. """ return line.replace('\n','') def next_asset(self): """ Return the next asset. """ # XXX this is really written with my feet. # clean me up please... line = self.fh.readline() if line: parsed_line = self.parse_line(line) if parsed_line: return parsed_line else: self.fh.seek(0) raise StopIteration def next(self): try: return self.next_asset() except: raise StopIteration
Add a method for line by line asset parsing
Add a method for line by line asset parsing
Python
bsd-2-clause
0xPoly/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,hackerberry/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe
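parse_line is a template-method hook: next_asset keeps ownership of the file iteration while subclasses override only the per-line transform. A hedged sketch of such a subclass (hypothetical, and it assumes the Asset class above is in scope):

class CommentStrippingAsset(Asset):
    """Asset whose input files may contain '#' comments and blank lines."""

    def parse_line(self, line):
        line = line.strip()
        if not line or line.startswith('#'):
            return None    # falsy: next_asset() never hands back the raw line
        return line

Note that next_asset() only returns a value when parse_line gives back something truthy, so a falsy result means the raw input line is never passed through to the caller.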
f0a1a75e6596f16b6e708c06a6c450d9acba2299
faas/puzzleboard-pop/puzzleboard_pop.py
faas/puzzleboard-pop/puzzleboard_pop.py
import json from datetime import datetime import requests from .model.puzzleboard import pop_puzzleboard class HuntwordsPuzzleBoardPopCommand(object): '''Command class that processes puzzleboard-pop message''' def run(self, jreq): '''Command that processes puzzleboard-pop message''' req = json.loads(jreq) pboard = pop_puzzleboard(req['puzzle']) jpboard = json.dumps(dict(pboard)) resp = { 'puzzleboard': jpboard, 'processed': { 'at': f'{datetime.now().isoformat()}', 'status': 'ok' } } send_consumed(pboard) return json.dumps(resp) def send_consumed(pboard): '''Send async request to generate a new copy''' url = 'http://puzzleboard-consumed.openfaas-fn:8080' data = f'{{"puzzle": "{pboard.puzzle.name}" }}' requests.post(url, data)
import json import logging import sys from datetime import datetime import requests from .model.puzzleboard import pop_puzzleboard class HuntwordsPuzzleBoardPopCommand(object): '''Command class that processes puzzleboard-pop message''' def __init__(self): logging.basicConfig(stream=sys.stderr) def run(self, jreq): '''Command that processes puzzleboard-pop message''' req = json.loads(jreq) pboard = pop_puzzleboard(req['puzzle']) jpboard = json.dumps(dict(pboard)) resp = { 'puzzleboard': jpboard, 'processed': { 'at': f'{datetime.now().isoformat()}', 'status': 'ok' } } rc = send_consumed(pboard) logging.info(rc) return json.dumps(resp) def send_consumed(pboard): '''Send async request to generate a new copy''' url = 'http://puzzleboard-consumed.openfaas-fn:8080' data = f'{{"puzzle": "{pboard.puzzle.name}" }}' return requests.post(url, data)
Add logging as a test
Add logging as a test
Python
mit
klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords,klmcwhirter/huntwords
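The commit routes the root logger to stderr and logs the requests.Response object returned by the consumer call. One caveat worth noting: logging.basicConfig defaults to the WARNING level, so logging.info records stay silent unless a level is passed, which may be why the message calls this "a test". A minimal sketch (the URL and payload are placeholders):

import json
import logging
import sys

import requests

logging.basicConfig(stream=sys.stderr, level=logging.INFO)

def notify_consumed(puzzle_name):
    url = 'http://puzzleboard-consumed.example:8080'   # placeholder
    data = json.dumps({'puzzle': puzzle_name})
    resp = requests.post(url, data)
    logging.info(resp)     # e.g. <Response [200]>
    return resp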
d6940b3ff80190f87bf7d5336b9c54dc160da12a
helpers/team_manipulator.py
helpers/team_manipulator.py
from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
Remove references to NdbManipulatorBase, which never really happened.
Remove references to NdbManipulatorBase, which never really happened.
Python
mit
1fish2/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance
from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team Remove references to NdbManipulatorBase, which never really happened.
from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
<commit_before>from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team <commit_msg>Remove references to NdbManipulatorBase, which never really happened.<commit_after>
from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team Remove references to NdbManipulatorBase, which never really happened.from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
<commit_before>from helpers.ndb_manipulator_base import NdbManipulatorBase class TeamManipulator(NdbManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team <commit_msg>Remove references to NdbManipulatorBase, which never really happened.<commit_after>from helpers.manipulator_base import ManipulatorBase class TeamManipulator(ManipulatorBase): """ Handle Team database writes. """ @classmethod def updateMerge(self, new_team, old_team): """ Given an "old" and a "new" Team object, replace the fields in the "old" team that are present in the "new" team, but keep fields from the "old" team that are null in the "new" team. """ attrs = [ "address", "name", "nickname", "website", ] for attr in attrs: if getattr(new_team, attr) is not None: if getattr(new_team, attr) != getattr(old_team, attr): setattr(old_team, attr, getattr(new_team, attr)) old_team.dirty = True # Take the new tpid and tpid_year iff the year is newer than the old one if (new_team.first_tpid_year > old_team.first_tpid_year): old_team.first_tpid_year = new_team.first_tpid_year old_team.first_tpid = new_team.first_tpid old_team.dirty = True return old_team
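The updateMerge logic in this row is a null-preserving attribute merge: a field from the new object only overwrites the old one when it is non-None and actually different. A minimal Python 3 sketch of the same idiom, assuming plain SimpleNamespace objects rather than the project's App Engine ndb models:

from types import SimpleNamespace

def merge_attrs(new_obj, old_obj, attrs):
    """Copy attrs from new_obj onto old_obj, but never clobber with None."""
    dirty = False
    for attr in attrs:
        new_value = getattr(new_obj, attr)
        if new_value is not None and new_value != getattr(old_obj, attr):
            setattr(old_obj, attr, new_value)
            dirty = True
    return dirty

old = SimpleNamespace(name="Old Name", nickname="Robots", website=None)
new = SimpleNamespace(name="New Name", nickname=None, website="http://example.com")
print(merge_attrs(new, old, ["name", "nickname", "website"]))  # True
print(old.nickname)  # 'Robots' -- the None in the new object did not overwrite it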
917feadfff4d38fe9b28e9451eb2def1438789fd
Mscthesis/IO/model_report.py
Mscthesis/IO/model_report.py
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, 'Images')) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(dirname, 'Images'), 'net_hist') fig2.savefig(join(dirname, 'Images'), 'heat_net') return fig1, fig2
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, reportname)) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(join(dirname, 'Images'), 'net_hist')) fig2.savefig(join(join(dirname, 'Images'), 'heat_net')) return fig1, fig2
Debug bad creation of folders.
Debug bad creation of folders.
Python
mit
tgquintela/Mscthesis
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, 'Images')) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(dirname, 'Images'), 'net_hist') fig2.savefig(join(dirname, 'Images'), 'heat_net') return fig1, fig2 Debug bad creation of folders.
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, reportname)) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(join(dirname, 'Images'), 'net_hist')) fig2.savefig(join(join(dirname, 'Images'), 'heat_net')) return fig1, fig2
<commit_before> """ Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, 'Images')) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(dirname, 'Images'), 'net_hist') fig2.savefig(join(dirname, 'Images'), 'heat_net') return fig1, fig2 <commit_msg>Debug bad creation of folders.<commit_after>
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, reportname)) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(join(dirname, 'Images'), 'net_hist')) fig2.savefig(join(join(dirname, 'Images'), 'heat_net')) return fig1, fig2
""" Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, 'Images')) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(dirname, 'Images'), 'net_hist') fig2.savefig(join(dirname, 'Images'), 'heat_net') return fig1, fig2 Debug bad creation of folders. """ Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, reportname)) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(join(dirname, 'Images'), 'net_hist')) fig2.savefig(join(join(dirname, 'Images'), 'heat_net')) return fig1, fig2
<commit_before> """ Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, 'Images')) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(dirname, 'Images'), 'net_hist') fig2.savefig(join(dirname, 'Images'), 'heat_net') return fig1, fig2 <commit_msg>Debug bad creation of folders.<commit_after> """ Module used to group the functions and utils to built a report from a model application. """ from Mscthesis.Plotting.net_plotting import plot_net_distribution,\ plot_heat_net from os.path import exists, join from os import makedirs def create_model_report(net, sectors, dirname, reportname): "Creation of a report for the model applied." # Check and create the folders if not exists(dirname): makedirs(dirname) if not exists(join(dirname, reportname)): makedirs(join(dirname, reportname)) if not exists(join(join(dirname, reportname), 'Images')): makedirs(join(join(dirname, reportname), 'Images')) # Creation of the plots fig1 = plot_net_distribution(net, 50) fig2 = plot_heat_net(net, sectors) fig1.savefig(join(join(dirname, 'Images'), 'net_hist')) fig2.savefig(join(join(dirname, 'Images'), 'heat_net')) return fig1, fig2
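The bug fixed in this row was a wrong makedirs argument, though even the patched version saves the figures into join(dirname, 'Images'), a directory it never creates; only join(dirname, reportname, 'Images') is made. A hedged Python 3 sketch of the directory handling, assuming the report's own Images folder is where the plots were meant to go:

import os

def prepare_report_dirs(dirname, reportname):
    """Create <dirname>/<reportname>/Images (and any parents) in one call."""
    images_dir = os.path.join(dirname, reportname, "Images")
    os.makedirs(images_dir, exist_ok=True)  # replaces the repeated exists() checks
    return images_dir

# images_dir = prepare_report_dirs(dirname, reportname)
# fig1.savefig(os.path.join(images_dir, "net_hist.png"))
# fig2.savefig(os.path.join(images_dir, "heat_net.png"))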
da514177b10eb510a2e17cc1fbce7c76946fe25e
OctoPrintOutputController.py
OctoPrintOutputController.py
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: x = -y if axis_information["z"].inverted: x = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: y = -y if axis_information["z"].inverted: z = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
Fix jogging with Y or Z axes inverted
Fix jogging with Y or Z axes inverted
Python
agpl-3.0
fieldOfView/OctoPrintPlugin
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: x = -y if axis_information["z"].inverted: x = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")Fix jogging with Y or Z axes inverted
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: y = -y if axis_information["z"].inverted: z = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
<commit_before># Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: x = -y if axis_information["z"].inverted: x = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")<commit_msg>Fix jogging with Y or Z axes inverted<commit_after>
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: y = -y if axis_information["z"].inverted: z = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: x = -y if axis_information["z"].inverted: x = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")Fix jogging with Y or Z axes inverted# Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: y = -y if axis_information["z"].inverted: z = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
<commit_before># Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: x = -y if axis_information["z"].inverted: x = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")<commit_msg>Fix jogging with Y or Z axes inverted<commit_after># Copyright (c) 2020 Aldo Hoeben / fieldOfView # OctoPrintPlugin is released under the terms of the AGPLv3 or higher. from cura.PrinterOutput.GenericOutputController import GenericOutputController class OctoPrintOutputController(GenericOutputController): def __init__(self, output_device: "PrinterOutputDevice") -> None: super().__init__(output_device) def moveHead(self, printer: "PrinterOutputModel", x, y, z, speed) -> None: axis_information = self._output_device.getAxisInformation() if axis_information["x"].inverted: x = -x if axis_information["y"].inverted: y = -y if axis_information["z"].inverted: z = -z self._output_device.sendCommand("G91") self._output_device.sendCommand("G0 X%s Y%s Z%s F%s" % (x, y, z, speed)) self._output_device.sendCommand("G90")
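The jog bug in this row is a classic copy-paste error: all three branches assigned to x. Writing the inversion data-driven makes that class of mistake impossible; a small sketch, assuming only that axis_information maps axis names to objects with an inverted flag, as in the plugin code:

from types import SimpleNamespace

def apply_axis_inversion(axis_information, x, y, z):
    """Negate each jog component whose axis is marked inverted."""
    moves = {"x": x, "y": y, "z": z}
    for axis in moves:
        if axis_information[axis].inverted:
            moves[axis] = -moves[axis]
    return moves["x"], moves["y"], moves["z"]

info = {a: SimpleNamespace(inverted=(a == "y")) for a in "xyz"}
print(apply_axis_inversion(info, 10, 10, 10))  # (10, -10, 10)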
51357755abe17adede5bbf7ccb3ba94c4ea701a9
src/libmv/multiview/panography_coeffs.py
src/libmv/multiview/panography_coeffs.py
import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }'
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister. # International Conference on Computer Vision and Pattern Recognition # (CVPR2007). Minneapolis, June 2007. import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' # As long as Python code do not return the correct value # I prefer use "Xcas Computer Algebra System" # http://www-fourier.ujf-grenoble.fr/~parisse/giac.html # Solution for the focal length of a pair of images from a rotating camera. # comment(" Define the base equation that share the Focal length 'f' ") # FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f)); # comment(" Solve equation=0 with the unknow 'f' ") # solve(FocalSolver=0,f);
Add xcas source code to obtain panography shared Focal polynomial solver.
Add xcas source code to obtain panography shared Focal polynomial solver.
Python
mit
petertsoi/libmv,libmv/libmv,SoylentGraham/libmv,libmv/libmv,SoylentGraham/libmv,rgkoo/libmv-blender,Nazg-Gul/libmv,Nazg-Gul/libmv,petertsoi/libmv,SoylentGraham/libmv,keir/libmv,keir/libmv,SoylentGraham/libmv,keir/libmv,rgkoo/libmv-blender,Nazg-Gul/libmv,libmv/libmv,rgkoo/libmv-blender,rgkoo/libmv-blender,Nazg-Gul/libmv,keir/libmv,rgkoo/libmv-blender,petertsoi/libmv,petertsoi/libmv,libmv/libmv,Nazg-Gul/libmv
import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' Add xcas source code to obtain panography shared Focal polynomial solver.
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister. # International Conference on Computer Vision and Pattern Recognition # (CVPR2007). Minneapolis, June 2007. import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' # As long as Python code do not return the correct value # I prefer use "Xcas Computer Algebra System" # http://www-fourier.ujf-grenoble.fr/~parisse/giac.html # Solution for the focal length of a pair of images from a rotating camera. # comment(" Define the base equation that share the Focal length 'f' ") # FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f)); # comment(" Solve equation=0 with the unknow 'f' ") # solve(FocalSolver=0,f);
<commit_before>import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' <commit_msg>Add xcas source code to obtain panography shared Focal polynomial solver.<commit_after>
# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister. # International Conference on Computer Vision and Pattern Recognition # (CVPR2007). Minneapolis, June 2007. import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' # As long as Python code do not return the correct value # I prefer use "Xcas Computer Algebra System" # http://www-fourier.ujf-grenoble.fr/~parisse/giac.html # Solution for the focal length of a pair of images from a rotating camera. # comment(" Define the base equation that share the Focal length 'f' ") # FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f)); # comment(" Solve equation=0 with the unknow 'f' ") # solve(FocalSolver=0,f);
import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' Add xcas source code to obtain panography shared Focal polynomial solver.# Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister. # International Conference on Computer Vision and Pattern Recognition # (CVPR2007). Minneapolis, June 2007. import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' # As long as Python code do not return the correct value # I prefer use "Xcas Computer Algebra System" # http://www-fourier.ujf-grenoble.fr/~parisse/giac.html # Solution for the focal length of a pair of images from a rotating camera. # comment(" Define the base equation that share the Focal length 'f' ") # FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f)); # comment(" Solve equation=0 with the unknow 'f' ") # solve(FocalSolver=0,f);
<commit_before>import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' <commit_msg>Add xcas source code to obtain panography shared Focal polynomial solver.<commit_after># Minimal Solutions for Panoramic Stitching. M. Brown, R. Hartley and D. Nister. # International Conference on Computer Vision and Pattern Recognition # (CVPR2007). Minneapolis, June 2007. import sympy f2, a12, a1, a2, b12, b1, b2 = sympy.symbols('f2 a12 a1 a2 b12 b1 b2') # Equation 12 from the brown paper; see panography.h equation_12 = ((a12 + f2)**2 * (b1 + f2) * (b2 + f2) - (b12 + f2)**2 * (a1 + f2) * (a2 + f2)) d = equation_12.as_poly(f2).as_dict() print ' // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.' print ' double P[4] = {' for i in range(4): print ' ', sympy.printing.ccode(d[(i,)]) print ' }' # As long as Python code do not return the correct value # I prefer use "Xcas Computer Algebra System" # http://www-fourier.ujf-grenoble.fr/~parisse/giac.html # Solution for the focal length of a pair of images from a rotating camera. # comment(" Define the base equation that share the Focal length 'f' ") # FocalSolver:=(a12+f)^2 / (( a1+f)*(a2+f)) - (b12+f)^2 / ((b1+f)*(b2+f)); # comment(" Solve equation=0 with the unknow 'f' ") # solve(FocalSolver=0,f);
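The commented-out Xcas fallback exists because the Python 2-era sympy call did not return what the author wanted. A re-expression against current sympy (hypothetical; the repository's script predates these APIs) that prints the same ascending-power coefficient table:

import sympy

f2, a12, a1, a2, b12, b1, b2 = sympy.symbols("f2 a12 a1 a2 b12 b1 b2")

equation_12 = ((a12 + f2) ** 2 * (b1 + f2) * (b2 + f2)
               - (b12 + f2) ** 2 * (a1 + f2) * (a2 + f2))

# Poly.all_coeffs() returns coefficients in descending powers of f2;
# reverse them to match the P[N]*x^N layout. The leading f2**4 terms
# cancel, so the polynomial is cubic and P has four entries.
coeffs = sympy.Poly(equation_12, f2).all_coeffs()[::-1]
print("  // Coefficients in ascending powers of alpha, i.e. P[N]*x^N.")
print("  double P[4] = {")
for c in coeffs:
    print("    " + sympy.ccode(sympy.expand(c)) + ",")
print("  };")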
4c9eeb2584408cd15803e1a6d980d4a73d49e73c
Software/Python/grove_sound_sensor.py
Software/Python/grove_sound_sensor.py
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value: grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
Fix typo on sound sensor example
Fix typo on sound sensor example
Python
mit
NeuroRoboticTech/Jetduino,penoud/GrovePi,stwolny/GrovePi,rpedersen/GrovePi,nerginer/GrovePi,nerginer/GrovePi,nerginer/GrovePi,NeuroRoboticTech/Jetduino,NeuroRoboticTech/Jetduino,stwolny/GrovePi,NeuroRoboticTech/Jetduino,rpedersen/GrovePi,karan259/GrovePi,stwolny/GrovePi,rpedersen/GrovePi,rpedersen/GrovePi,stwolny/GrovePi,nerginer/GrovePi,NeuroRoboticTech/Jetduino,karan259/GrovePi,penoud/GrovePi,rpedersen/GrovePi,stwolny/GrovePi,rpedersen/GrovePi,nerginer/GrovePi,karan259/GrovePi,stwolny/GrovePi,penoud/GrovePi,karan259/GrovePi,NeuroRoboticTech/Jetduino,nerginer/GrovePi,karan259/GrovePi,penoud/GrovePi,rpedersen/GrovePi,karan259/GrovePi,penoud/GrovePi,rpedersen/GrovePi,stwolny/GrovePi,karan259/GrovePi,karan259/GrovePi,NeuroRoboticTech/Jetduino,nerginer/GrovePi,penoud/GrovePi,nerginer/GrovePi,penoud/GrovePi,penoud/GrovePi,karan259/GrovePi,stwolny/GrovePi
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error" Fix typo on sound sensor example
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value: grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
<commit_before># GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error" <commit_msg>Fix typo on sound sensor example<commit_after>
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value: grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error" Fix typo on sound sensor example# GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value: grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
<commit_before># GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error" <commit_msg>Fix typo on sound sensor example<commit_after># GrovePi + Sound Sensor + LED # http://www.seeedstudio.com/wiki/Grove_-_Sound_Sensor # http://www.seeedstudio.com/wiki/Grove_-_LED_Socket_Kit import time import grovepi # Connect the Sound Sensor to analog port A0 sound_sensor = 0 # Connect the LED to digital port D5 led = 5 grovepi.pinMode(sound_sensor,"INPUT") grovepi.pinMode(led,"OUTPUT") # The threshold to turn the led on 400.00 * 5 / 1024 = 1.95v threshold_value = 400 while True: try: # Read the sound level sensor_value = grovepi.analogRead(sound_sensor) print sensor_value # If loud, illuminate LED, otherwise dim if sensor_value > threshold_value: grovepi.digitalWrite(led,1) else: grovepi.digitalWrite(led,0) time.sleep(.5) except IOError: print "Error"
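The threshold comment in this row encodes the ADC math: Grove analog reads are 10-bit values against a 5 V reference, so 400 counts is roughly 1.95 V. A one-line helper making that conversion explicit:

def adc_to_volts(raw, vref=5.0, resolution=1024):
    """Convert a raw 10-bit Grove ADC reading to volts."""
    return raw * vref / resolution

print(adc_to_volts(400))  # 1.953125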
7f7d2a1ec73d3baee2373eef8de1eab93efc4ea9
ibmcnx/doc/Documentation.py
ibmcnx/doc/Documentation.py
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import ibmcnx.filehandle import sys emp1 = ibmcnx.filehandle.Ibmcnxfile() sys.stdout = emp1.askFileParams() print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import sys import os.path filename = raw_input( 'Path and Filename to Documentation file: ' ) sys.stdout = open( filename, "w") print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
Create script to save documentation to a file
4: Create script to save documentation to a file Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import ibmcnx.filehandle import sys emp1 = ibmcnx.filehandle.Ibmcnxfile() sys.stdout = emp1.askFileParams() print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import sys import os.path filename = raw_input( 'Path and Filename to Documentation file: ' ) sys.stdout = open( filename, "w") print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
<commit_before>###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import ibmcnx.filehandle import sys emp1 = ibmcnx.filehandle.Ibmcnxfile() sys.stdout = emp1.askFileParams() print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import sys import os.path filename = raw_input( 'Path and Filename to Documentation file: ' ) sys.stdout = open( filename, "w") print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import ibmcnx.filehandle import sys emp1 = ibmcnx.filehandle.Ibmcnxfile() sys.stdout = emp1.askFileParams() print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import sys import os.path filename = raw_input( 'Path and Filename to Documentation file: ' ) sys.stdout = open( filename, "w") print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
<commit_before>###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import ibmcnx.filehandle import sys emp1 = ibmcnx.filehandle.Ibmcnxfile() sys.stdout = emp1.askFileParams() print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>###### # Create a file (html or markdown) with the output of # - JVMHeap # - LogFiles # - Ports # - Variables # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-08 # # License: Apache 2.0 # # TODO: Create a menu for file selection import sys import os.path filename = raw_input( 'Path and Filename to Documentation file: ' ) sys.stdout = open( filename, "w") print '# JVM Settings of all AppServers:' execfile( 'ibmcnx/doc/JVMSettings.py' ) print '# Used Ports:' execfile( 'ibmcnx/doc/Ports.py' ) print '# LogFile Settgins:' execfile( 'ibmcnx/doc/LogFiles.py' ) print '# WebSphere Variables' execfile( 'ibmcnx/doc/Variables.py' )
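The patched script in this row reassigns sys.stdout for the rest of the wsadmin session and never restores it or closes the file. A hedged sketch of the same redirection with cleanup, kept Python 2 / Jython compatible like the original (execfile, raw_input):

import sys

def run_with_output_file(filename, scripts):
    """Redirect stdout to filename while running scripts, then restore it."""
    original_stdout = sys.stdout
    out = open(filename, "w")
    try:
        sys.stdout = out
        for script in scripts:
            execfile(script)
    finally:
        sys.stdout = original_stdout
        out.close()

# run_with_output_file(raw_input('Path and Filename to Documentation file: '),
#                      ['ibmcnx/doc/JVMSettings.py', 'ibmcnx/doc/Ports.py',
#                       'ibmcnx/doc/LogFiles.py', 'ibmcnx/doc/Variables.py'])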
3fb0f567dcaf69e4fa9872702ffbfa8ab0e69eaf
lib/utilities/key_exists.py
lib/utilities/key_exists.py
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
Add more error handling to key exists
Add more error handling to key exists
Python
mpl-2.0
mpurzynski/MozDef,mozilla/MozDef,jeffbryner/MozDef,mozilla/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,Phrozyn/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,mpurzynski/MozDef,mozilla/MozDef,mozilla/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,jeffbryner/MozDef
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False Add more error handling to key exists
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
<commit_before> def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False <commit_msg>Add more error handling to key exists<commit_after>
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False Add more error handling to key exists def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
<commit_before> def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False <commit_msg>Add more error handling to key exists<commit_after> def key_exists(search_key, inputed_dict): ''' Given a search key which is dot notated return wether or not that key exists in a dictionary ''' num_levels = search_key.split(".") if len(num_levels) == 0: return False current_pointer = inputed_dict for updated_key in num_levels: if current_pointer is None: return False if updated_key == num_levels[-1]: return updated_key in current_pointer if updated_key in current_pointer: current_pointer = current_pointer[updated_key] else: return False
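A usage sketch for key_exists with hypothetical data, including the case the added guard protects against: an intermediate key whose value is None would otherwise raise TypeError on the `in` test.

data = {"details": {"sourceipaddress": "10.0.0.1", "username": None}}

print(key_exists("details.sourceipaddress", data))   # True
print(key_exists("details.username", data))          # True (key exists, value is None)
print(key_exists("details.username.domain", data))   # False -- None guard, no TypeError
print(key_exists("summary", data))                   # False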
a3ab513306614393f901e4991886ba93b6ed26a3
cardboard/frontend/testing.py
cardboard/frontend/testing.py
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingFrontend(FrontendMixin): implements(IFrontend) select = mock_selector("select") select_cards = mock_selector("select_cards") select_players = mock_selector("select_players") select_combined = mock_selector("select_combined") select_range = mock_selector("select_range") def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingSelector(object): choice = mock_selector("choice") cards = mock_selector("cards") players = mock_selector("players") combined = mock_selector("combined") range = mock_selector("range") def __init__(self, frontend): super(TestingSelector, self).__init__() class TestingFrontend(FrontendMixin): implements(IFrontend) info = lambda _, __ : None select = TestingSelector def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
Make TestingFrontend not blow up from info and move select to TestingSelector.
Make TestingFrontend not blow up from info and move select to TestingSelector.
Python
mit
Julian/cardboard,Julian/cardboard
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingFrontend(FrontendMixin): implements(IFrontend) select = mock_selector("select") select_cards = mock_selector("select_cards") select_players = mock_selector("select_players") select_combined = mock_selector("select_combined") select_range = mock_selector("select_range") def prompt(self, msg): log.msg(msg) def priority_granted(self): pass Make TestingFrontend not blow up from info and move select to TestingSelector.
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingSelector(object): choice = mock_selector("choice") cards = mock_selector("cards") players = mock_selector("players") combined = mock_selector("combined") range = mock_selector("range") def __init__(self, frontend): super(TestingSelector, self).__init__() class TestingFrontend(FrontendMixin): implements(IFrontend) info = lambda _, __ : None select = TestingSelector def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
<commit_before>""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingFrontend(FrontendMixin): implements(IFrontend) select = mock_selector("select") select_cards = mock_selector("select_cards") select_players = mock_selector("select_players") select_combined = mock_selector("select_combined") select_range = mock_selector("select_range") def prompt(self, msg): log.msg(msg) def priority_granted(self): pass <commit_msg>Make TestingFrontend not blow up from info and move select to TestingSelector.<commit_after>
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingSelector(object): choice = mock_selector("choice") cards = mock_selector("cards") players = mock_selector("players") combined = mock_selector("combined") range = mock_selector("range") def __init__(self, frontend): super(TestingSelector, self).__init__() class TestingFrontend(FrontendMixin): implements(IFrontend) info = lambda _, __ : None select = TestingSelector def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingFrontend(FrontendMixin): implements(IFrontend) select = mock_selector("select") select_cards = mock_selector("select_cards") select_players = mock_selector("select_players") select_combined = mock_selector("select_combined") select_range = mock_selector("select_range") def prompt(self, msg): log.msg(msg) def priority_granted(self): pass Make TestingFrontend not blow up from info and move select to TestingSelector.""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingSelector(object): choice = mock_selector("choice") cards = mock_selector("cards") players = mock_selector("players") combined = mock_selector("combined") range = mock_selector("range") def __init__(self, frontend): super(TestingSelector, self).__init__() class TestingFrontend(FrontendMixin): implements(IFrontend) info = lambda _, __ : None select = TestingSelector def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
<commit_before>""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingFrontend(FrontendMixin): implements(IFrontend) select = mock_selector("select") select_cards = mock_selector("select_cards") select_players = mock_selector("select_players") select_combined = mock_selector("select_combined") select_range = mock_selector("select_range") def prompt(self, msg): log.msg(msg) def priority_granted(self): pass <commit_msg>Make TestingFrontend not blow up from info and move select to TestingSelector.<commit_after>""" A frontend for use when testing. """ import contextlib from twisted.python import log from zope.interface import implements from cardboard.frontend import FrontendMixin, IFrontend def mock_selector(name): selections = [()] @contextlib.contextmanager def will_return(*selection): selections.append(selection) yield selections.pop() def select(self, *args, **kwargs): return selections[-1] select.__name__ = name select.will_return = will_return return select class TestingSelector(object): choice = mock_selector("choice") cards = mock_selector("cards") players = mock_selector("players") combined = mock_selector("combined") range = mock_selector("range") def __init__(self, frontend): super(TestingSelector, self).__init__() class TestingFrontend(FrontendMixin): implements(IFrontend) info = lambda _, __ : None select = TestingSelector def prompt(self, msg): log.msg(msg) def priority_granted(self): pass
b56c5ca12f9806ecedc531e1f00ec1d7f2162b46
src-django/authentication/urls.py
src-django/authentication/urls.py
from django.conf.urls import url
from views import login, logout, signup

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
]
from django.conf.urls import url
from views import login, logout, signup, confirm_email

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
    url(r'^confirm_email/(?P<key>\w+)', confirm_email),
]
Add an endpoint for email confirmation
Add an endpoint for email confirmation
Python
bsd-3-clause
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
from django.conf.urls import url
from views import login, logout, signup

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
]
Add an endpoint for email confirmation
from django.conf.urls import url
from views import login, logout, signup, confirm_email

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
    url(r'^confirm_email/(?P<key>\w+)', confirm_email),
]
<commit_before>from django.conf.urls import url
from views import login, logout, signup

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
]
<commit_msg>Add an endpoint for email confirmation<commit_after>
from django.conf.urls import url
from views import login, logout, signup, confirm_email

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
    url(r'^confirm_email/(?P<key>\w+)', confirm_email),
]
from django.conf.urls import url
from views import login, logout, signup

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
]
Add an endpoint for email confirmationfrom django.conf.urls import url
from views import login, logout, signup, confirm_email

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
    url(r'^confirm_email/(?P<key>\w+)', confirm_email),
]
<commit_before>from django.conf.urls import url
from views import login, logout, signup

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
]
<commit_msg>Add an endpoint for email confirmation<commit_after>from django.conf.urls import url
from views import login, logout, signup, confirm_email

urlpatterns = [
    url(r'^login', login),
    url(r'^logout', logout),
    url(r'^signup', signup),
    url(r'^confirm_email/(?P<key>\w+)', confirm_email),
]
2f71ee89ab4f268518dc772af5755cca35976c4b
winthrop/annotation/models.py
winthrop/annotation/models.py
from django.db import models

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


# NOTE: needs to be different name than Annotation due to reverse rel issues...
class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        print('local info!!!')
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info
from urllib.parse import urlparse

from django.db import models
from django.urls import resolve, Resolver404

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info

    def save(self, *args, **kwargs):
        # NOTE: could probably set the canvas uri in javascript instead
        # of using page uri, but for now determine canvas id
        # based on the page uri
        try:
            match = resolve(urlparse(self.uri).path)
            if match.url_name == 'page' and 'djiffy' in match.namespaces:
                self.canvas = IfPage.objects.get(
                    short_id=match.kwargs['id'],
                    book__short_id=match.kwargs['book_id']
                )
        except Resolver404:
            pass

        super(Annotation, self).save()
Add canvas lookup by url to associate annotations with iiif canvas uri
Add canvas lookup by url to associate annotations with iiif canvas uri
Python
apache-2.0
Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django
from django.db import models

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


# NOTE: needs to be different name than Annotation due to reverse rel issues...
class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        print('local info!!!')
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info
Add canvas lookup by url to associate annotations with iiif canvas uri
from urllib.parse import urlparse

from django.db import models
from django.urls import resolve, Resolver404

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info

    def save(self, *args, **kwargs):
        # NOTE: could probably set the canvas uri in javascript instead
        # of using page uri, but for now determine canvas id
        # based on the page uri
        try:
            match = resolve(urlparse(self.uri).path)
            if match.url_name == 'page' and 'djiffy' in match.namespaces:
                self.canvas = IfPage.objects.get(
                    short_id=match.kwargs['id'],
                    book__short_id=match.kwargs['book_id']
                )
        except Resolver404:
            pass

        super(Annotation, self).save()
<commit_before>from django.db import models

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


# NOTE: needs to be different name than Annotation due to reverse rel issues...
class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        print('local info!!!')
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info
<commit_msg>Add canvas lookup by url to associate annotations with iiif canvas uri<commit_after>
from urllib.parse import urlparse

from django.db import models
from django.urls import resolve, Resolver404

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info

    def save(self, *args, **kwargs):
        # NOTE: could probably set the canvas uri in javascript instead
        # of using page uri, but for now determine canvas id
        # based on the page uri
        try:
            match = resolve(urlparse(self.uri).path)
            if match.url_name == 'page' and 'djiffy' in match.namespaces:
                self.canvas = IfPage.objects.get(
                    short_id=match.kwargs['id'],
                    book__short_id=match.kwargs['book_id']
                )
        except Resolver404:
            pass

        super(Annotation, self).save()
from django.db import models

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


# NOTE: needs to be different name than Annotation due to reverse rel issues...
class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        print('local info!!!')
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info
Add canvas lookup by url to associate annotations with iiif canvas urifrom urllib.parse import urlparse

from django.db import models
from django.urls import resolve, Resolver404

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info

    def save(self, *args, **kwargs):
        # NOTE: could probably set the canvas uri in javascript instead
        # of using page uri, but for now determine canvas id
        # based on the page uri
        try:
            match = resolve(urlparse(self.uri).path)
            if match.url_name == 'page' and 'djiffy' in match.namespaces:
                self.canvas = IfPage.objects.get(
                    short_id=match.kwargs['id'],
                    book__short_id=match.kwargs['book_id']
                )
        except Resolver404:
            pass

        super(Annotation, self).save()
<commit_before>from django.db import models

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


# NOTE: needs to be different name than Annotation due to reverse rel issues...
class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        print('local info!!!')
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info
<commit_msg>Add canvas lookup by url to associate annotations with iiif canvas uri<commit_after>from urllib.parse import urlparse

from django.db import models
from django.urls import resolve, Resolver404

from annotator_store.models import BaseAnnotation
from djiffy.models import IfPage
from winthrop.people.models import Person


class Annotation(BaseAnnotation):
    # NOTE: do we want to associate explicitly with canvas in the db?
    # could just use uri, but faster lookup if we associate...
    canvas = models.ForeignKey(IfPage, null=True, blank=True)
    author = models.ForeignKey(Person, null=True, blank=True)

    def info(self):
        info = super(Annotation, self).info()
        info['extra_data'] = 'foo'
        return info

    def save(self, *args, **kwargs):
        # NOTE: could probably set the canvas uri in javascript instead
        # of using page uri, but for now determine canvas id
        # based on the page uri
        try:
            match = resolve(urlparse(self.uri).path)
            if match.url_name == 'page' and 'djiffy' in match.namespaces:
                self.canvas = IfPage.objects.get(
                    short_id=match.kwargs['id'],
                    book__short_id=match.kwargs['book_id']
                )
        except Resolver404:
            pass

        super(Annotation, self).save()
ff4c5d14d849db72f3af92bc87ccfc6626ef8744
reporting/job.py
reporting/job.py
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (int(duration) / float(1000)) if duration else (self.finish_timestamp - self.start_timestamp).total_seconds()

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration = None):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (self.finish_timestamp - self.start_timestamp).total_seconds() if duration is None else (int(duration) / float(1000))

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
Make duration optional to create a Job
Make duration optional to create a Job
Python
mit
luigiberrettini/build-deploy-stats
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (int(duration) / float(1000)) if duration else (self.finish_timestamp - self.start_timestamp).total_seconds()

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_secondsMake duration optional to create a Job
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration = None):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (self.finish_timestamp - self.start_timestamp).total_seconds() if duration is None else (int(duration) / float(1000))

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
<commit_before>#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (int(duration) / float(1000)) if duration else (self.finish_timestamp - self.start_timestamp).total_seconds()

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds<commit_msg>Make duration optional to create a Job<commit_after>
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration = None):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (self.finish_timestamp - self.start_timestamp).total_seconds() if duration is None else (int(duration) / float(1000))

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (int(duration) / float(1000)) if duration else (self.finish_timestamp - self.start_timestamp).total_seconds()

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_secondsMake duration optional to create a Job#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration = None):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (self.finish_timestamp - self.start_timestamp).total_seconds() if duration is None else (int(duration) / float(1000))

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
<commit_before>#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (int(duration) / float(1000)) if duration else (self.finish_timestamp - self.start_timestamp).total_seconds()

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds<commit_msg>Make duration optional to create a Job<commit_after>#!/usr/bin/env python3

from dateutil import parser


class Job:
    def __init__(self, category, id, status, start_timestamp, finish_timestamp, duration = None):
        self.tool_id = category.tool
        self.type_id = category.context
        self.id = id
        self.status_bit = int(status)
        self.start_timestamp = parser.parse(start_timestamp)
        self.finish_timestamp = parser.parse(finish_timestamp)
        self.duration_seconds = (self.finish_timestamp - self.start_timestamp).total_seconds() if duration is None else (int(duration) / float(1000))

    @property
    def timestamp(self):
        return self.finish_timestamp

    @property
    def tool(self):
        return self.tool_id

    @property
    def type(self):
        return self.type_id

    @property
    def status(self):
        return self.status_bit

    @property
    def duration(self):
        return self.duration_seconds
c162514291428f26dc78d08393455ff33fe12f12
requests_test.py
requests_test.py
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)

# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)

#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)
Clean up file and remove notes now that parameters in API feed are working
Clean up file and remove notes now that parameters in API feed are working
Python
mit
prcutler/nflpool,prcutler/nflpool
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)

# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)

#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.jsonClean up file and remove notes now that parameters in API feed are working
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)
<commit_before>import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)

# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)

#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json<commit_msg>Clean up file and remove notes now that parameters in API feed are working<commit_after>
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)
import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)

# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)

#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.jsonClean up file and remove notes now that parameters in API feed are workingimport requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)
<commit_before>import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)

# Make a get request with the parameters.
#response = requests.get("http://api.open-notify.org/iss-pass.json", params=parameters)

#https://www.mysportsfeeds.com/api/feed/pull/nfl/{2016-2017}/playoff_team_standings.json
#{format}?teamstats={team-stats}
#https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017/playoff_team_standings.json<commit_msg>Clean up file and remove notes now that parameters in API feed are working<commit_after>import requests
from requests.auth import HTTPBasicAuth
import secret

parameters = 'teamstats'

response = requests.get('https://www.mysportsfeeds.com/api/feed/pull/nfl/2016-2017-regular/playoff_team_standings.json?teamstats', auth=HTTPBasicAuth(secret.msf_username, secret.msf_pw))

print(response.status_code)
0ff15389ef24862f4f1b5b8923d6ca057018ae1a
polling_stations/apps/pollingstations/migrations/0010_auto_20160417_1416.py
polling_stations/apps/pollingstations/migrations/0010_auto_20160417_1416.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0009_customfinder'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
Edit migration so it depends on 0009_customfinder
Edit migration so it depends on 0009_customfinder

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #240
Python
bsd-3-clause
DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
Edit migration so it depends on 0009_customfinder

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #240
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0009_customfinder'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
<commit_msg>Edit migration so it depends on 0009_customfinder

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #240<commit_after>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0009_customfinder'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
Edit migration so it depends on 0009_customfinder

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #240# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0009_customfinder'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0006_residentialaddress_slug'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
<commit_msg>Edit migration so it depends on 0009_customfinder

Ensure the migrations will apply correctly without conflict once merged

Merging this branch is now blocked on PR #240<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('pollingstations', '0009_customfinder'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='pollingdistrict',
            unique_together=set([('council', 'internal_council_id')]),
        ),
        migrations.AlterUniqueTogether(
            name='pollingstation',
            unique_together=set([('council', 'internal_council_id')]),
        ),
    ]
b39bf8ba3fc3d2037a293db7d5856d02ac047a31
nbt/__init__.py
nbt/__init__.py
from nbt import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)
__all__ = ["chunk", "region", "nbt"]
from . import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)
Use relative import (required for Python 3, supported in 2.6 or higher)
Use relative import (required for Python 3, supported in 2.6 or higher)
Python
mit
macfreek/NBT,twoolie/NBT,fwaggle/NBT,devmario/NBT,cburschka/NBT
from nbt import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)Use relative import (required for Python 3, supported in 2.6 or higher)
__all__ = ["chunk", "region", "nbt"]
from . import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)
<commit_before>from nbt import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)<commit_msg>Use relative import (required for Python 3, supported in 2.6 or higher)<commit_after>
__all__ = ["chunk", "region", "nbt"]
from . import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)
from nbt import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)Use relative import (required for Python 3, supported in 2.6 or higher)__all__ = ["chunk", "region", "nbt"]
from . import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)
<commit_before>from nbt import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)<commit_msg>Use relative import (required for Python 3, supported in 2.6 or higher)<commit_after>__all__ = ["chunk", "region", "nbt"]
from . import *

VERSION = (1, 1)

def _get_version():
    return ".".join(VERSION)