Dataset columns (string statistics per column):

| column          | type   | length range / classes |
|-----------------|--------|------------------------|
| commit          | string | 40–40                  |
| old_file        | string | 4–118                  |
| new_file        | string | 4–118                  |
| old_contents    | string | 0–2.94k                |
| new_contents    | string | 1–4.43k                |
| subject         | string | 15–444                 |
| message         | string | 16–3.45k               |
| lang            | string | 1 distinct value       |
| license         | string | 13 distinct values     |
| repos           | string | 5–43.2k                |
| prompt          | string | 17–4.58k               |
| response        | string | 1–4.43k                |
| prompt_tagged   | string | 58–4.62k               |
| response_tagged | string | 1–4.43k                |
| text            | string | 132–7.29k              |
| text_tagged     | string | 173–7.33k              |

The last six columns are derived from the base fields: `prompt` is `old_contents` followed by `message`, `response` equals `new_contents`, `text` is `prompt` followed by `response`, and the `*_tagged` variants wrap the same content in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. The sample records below therefore list only the base fields.
commit: b81ace397887cb6d0fc7db21d623667223adbfbf
old_file: python/frequency_queries.py
new_file: python/frequency_queries.py
old_contents:

```python
#!/bin/python3

import math
import os
import random
import re
import sys
from collections import Counter

# Complete the freqQuery function below.
def freqQuery(queries):
    output = []
    occurences = Counter()
    frequencies = Counter()
    for operation, value in queries:
        if (operation == 1):
            frequencies[occurences[value]] -= 1
            occurences[value] += 1
            frequencies[occurences[value]] += 1
        elif (operation == 2):
            frequencies[occurences[value]] -= 1
            occurences[value] -= 1
            frequencies[occurences[value]] += 1
        elif (operation == 3):
            if frequencies[value] > 0:
                output.append(1)
            else:
                output.append(0)
    print(occurences)
    print(frequencies)
    return output

if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    q = int(input().strip())
    queries = []
    for _ in range(q):
        queries.append(list(map(int, input().rstrip().split())))
    ans = freqQuery(queries)
    fptr.write('\n'.join(map(str, ans)))
    fptr.write('\n')
    fptr.close()
```

new_contents:

```python
#!/bin/python3

import math
import os
import random
import re
import sys
from collections import Counter

# Complete the freqQuery function below.
def freqQuery(queries):
    output = []
    array = []
    occurences = Counter()
    frequencies = Counter()
    for operation, value in queries:
        if (operation == 1):
            frequencies[occurences[value]] -= 1
            occurences[value] += 1
            frequencies[occurences[value]] += 1
        elif (operation == 2):
            if (occurences[value] > 0):
                frequencies[occurences[value]] -= 1
                occurences[value] -= 1
                frequencies[occurences[value]] += 1
        elif (operation == 3):
            if frequencies[value] > 0:
                output.append(1)
            else:
                output.append(0)
    # print(occurences)
    # print(frequencies)
    return output

if __name__ == '__main__':
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    q = int(input().strip())
    queries = []
    for _ in range(q):
        queries.append(list(map(int, input().rstrip().split())))
    ans = freqQuery(queries)
    fptr.write('\n'.join(map(str, ans)))
    fptr.write('\n')
    fptr.close()
```

subject: Fix bug with negative counts
message: Fix bug with negative counts
lang: Python
license: mit
repos: rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
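The guard added around operation 2 is the entire fix: without it, deleting a value whose count is already zero drives `occurences[value]` negative and records a frequency for an impossible count. A minimal sketch of the pre-fix failure mode, using the same two `Counter` objects as the record:

```python
# Sketch of the bug this commit fixes: an unguarded delete (operation 2)
# on a value that was never inserted corrupts both counters.
from collections import Counter

occurences = Counter()
frequencies = Counter()

# Pre-fix handling of query (2, 5) when occurences[5] == 0:
frequencies[occurences[5]] -= 1   # frequencies[0] becomes -1
occurences[5] -= 1                # occurences[5] becomes -1
frequencies[occurences[5]] += 1   # frequencies[-1] becomes 1

print(occurences[5], frequencies[-1])   # prints: -1 1
```

The `if (occurences[value] > 0)` check in `new_contents` simply makes such a delete a no-op, which is the behaviour the HackerRank Frequency Queries problem expects.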
commit: 8b731102036583099bda475ec1a857b19ff18f80
old_file: minimal.py
new_file: minimal.py
old_contents:

```python
import sys

from django.conf import settings
from django.conf.urls import url
from django.core.management import execute_from_command_line
from django.http import HttpResponse

settings.configure(
    DEBUG=True,
    ROOT_URLCONF=sys.modules[__name__],
)

def index(request):
    return HttpResponse('<h1>A minimal Django response!</h1>')

urlpatterns = [
    url(r'^$', index),
]

if __name__ == '__main__':
    execute_from_command_line(sys.argv)
```

new_contents:

```python
import sys

from django.conf import settings
from django.urls import path
from django.core.management import execute_from_command_line
from django.http import HttpResponse

settings.configure(
    DEBUG=True,
    ROOT_URLCONF=sys.modules[__name__],
)

def index(request):
    return HttpResponse('<h1>A minimal Django response!</h1>')

urlpatterns = [
    path(r'', index),
]

if __name__ == '__main__':
    execute_from_command_line(sys.argv)
```

subject: Update `urlpatterns` for 4.0 removal of `django.conf.urls.url()`
message: Update `urlpatterns` for 4.0 removal of `django.conf.urls.url()`
lang: Python
license: mit
repos: rnevius/minimal-django
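The change is the standard Django 4.0 migration: `django.conf.urls.url()` was deprecated in Django 2.0 and removed in 4.0, and a regex route for the site root becomes a plain-string `path()` route. (The `r''` prefix the commit keeps is harmless but unnecessary, since `path()` takes a literal string, not a regex.) The two equivalent spellings, assuming the `index` view from the record:

```python
# Sketch of the migration in this record; `index` is the view defined in
# the file above. path() matches literal strings; re_path() keeps regexes.
from django.urls import path, re_path

urlpatterns = [
    path('', index),          # what the commit effectively does
    # re_path(r'^$', index),  # drop-in regex equivalent, if ever needed
]
```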
commit: fdc0bb75271b90a31072f79b95283e1156d50181
old_file: waffle/decorators.py
new_file: waffle/decorators.py
old_contents:

```python
from functools import wraps

from django.http import Http404
from django.utils.decorators import available_attrs

from waffle import is_active

def waffle(flag_name):
    def decorator(view):
        if flag_name.startswith('!'):
            active = is_active(request, flag_name[1:])
        else:
            active = is_active(request, flag_name)

        @wraps(view, assigned=available_attrs(view))
        def _wrapped_view(request, *args, **kwargs):
            if not active:
                raise Http404
            return view(request, *args, **kwargs)
        return _wrapped_view
    return decorator
```

new_contents:

```python
from functools import wraps

from django.http import Http404
from django.utils.decorators import available_attrs

from waffle import is_active

def waffle(flag_name):
    def decorator(view):
        @wraps(view, assigned=available_attrs(view))
        def _wrapped_view(request, *args, **kwargs):
            if flag_name.startswith('!'):
                active = is_active(request, flag_name[1:])
            else:
                active = is_active(request, flag_name)
            if not active:
                raise Http404
            return view(request, *args, **kwargs)
        return _wrapped_view
    return decorator
```

subject: Make the decorator actually work again.
message: Make the decorator actually work again.
lang: Python
license: bsd-3-clause
repos: isotoma/django-waffle,TwigWorld/django-waffle,rlr/django-waffle,webus/django-waffle,groovecoder/django-waffle,JeLoueMonCampingCar/django-waffle,crccheck/django-waffle,safarijv/django-waffle,paulcwatts/django-waffle,JeLoueMonCampingCar/django-waffle,11craft/django-waffle,festicket/django-waffle,styleseat/django-waffle,mark-adams/django-waffle,rodgomes/django-waffle,crccheck/django-waffle,mark-adams/django-waffle,webus/django-waffle,groovecoder/django-waffle,mwaaas/django-waffle-session,hwkns/django-waffle,VladimirFilonov/django-waffle,ekohl/django-waffle,festicket/django-waffle,paulcwatts/django-waffle,TwigWorld/django-waffle,mwaaas/django-waffle-session,VladimirFilonov/django-waffle,rlr/django-waffle,willkg/django-waffle,ilanbm/django-waffle,festicket/django-waffle,crccheck/django-waffle,rodgomes/django-waffle,engagespark/django-waffle,hwkns/django-waffle,safarijv/django-waffle,webus/django-waffle,rlr/django-waffle,willkg/django-waffle,ilanbm/django-waffle,ekohl/django-waffle,groovecoder/django-waffle,hwkns/django-waffle,isotoma/django-waffle,11craft/django-waffle,rlr/django-waffle,ilanbm/django-waffle,JeLoueMonCampingCar/django-waffle,isotoma/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,engagespark/django-waffle,engagespark/django-waffle,rodgomes/django-waffle,mwaaas/django-waffle-session,styleseat/django-waffle,mark-adams/django-waffle,rsalmaso/django-waffle,rsalmaso/django-waffle,isotoma/django-waffle,rsalmaso/django-waffle,rsalmaso/django-waffle,rodgomes/django-waffle,mwaaas/django-waffle-session,styleseat/django-waffle,ilanbm/django-waffle,safarijv/django-waffle
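The pre-fix decorator fails twice over: it references `request` at decoration time, before any request exists (a `NameError` as soon as a view is decorated), and even with a request available it would evaluate the flag once at import rather than per request. Moving the `is_active()` call inside `_wrapped_view`, as `new_contents` does, fixes both. The skeleton of the distinction:

```python
# Decoration-time vs request-time, stripped to the bone.
from functools import wraps

def waffle_sketch(flag_name):
    def decorator(view):
        # This scope runs once, when the view is decorated at import time.
        # No `request` exists here -- hence the old NameError.
        @wraps(view)
        def _wrapped_view(request, *args, **kwargs):
            # This scope runs on every request; flag checks belong here.
            return view(request, *args, **kwargs)
        return _wrapped_view
    return decorator
```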
commit: 6c578b67753e7a3fd646e5d91259b50c0b39bec6
old_file: tests/test_add_target.py
new_file: tests/test_add_target.py
old_contents:

```python
"""
Tests for helper function for adding a target to a Vuforia database.
"""

import io

from vws import VWS

class TestSuccess:
    """
    Tests for successfully adding a target.
    """

    def test_add_target(
        self,
        client: VWS,
        high_quality_image: io.BytesIO,
    ) -> None:
        """
        No exception is raised when adding one target.
        """
        client.add_target(name='x', width=1, image=high_quality_image)

    def test_add_two_targets(
        self,
        client: VWS,
        high_quality_image: io.BytesIO,
    ) -> None:
        """
        No exception is raised when adding two targets with different names.
        """
        client.add_target(name='x', width=1, image=high_quality_image)
        client.add_target(name='a', width=1, image=high_quality_image)
```

new_contents:

```python
"""
Tests for helper function for adding a target to a Vuforia database.
"""

import io

from mock_vws import MockVWS
from vws import VWS

class TestSuccess:
    """
    Tests for successfully adding a target.
    """

    def test_add_target(
        self,
        client: VWS,
        high_quality_image: io.BytesIO,
    ) -> None:
        """
        No exception is raised when adding one target.
        """
        client.add_target(name='x', width=1, image=high_quality_image)

    def test_add_two_targets(
        self,
        client: VWS,
        high_quality_image: io.BytesIO,
    ) -> None:
        """
        No exception is raised when adding two targets with different names.
        """
        client.add_target(name='x', width=1, image=high_quality_image)
        client.add_target(name='a', width=1, image=high_quality_image)

class TestCustomBaseURL:
    """
    Tests for adding images to databases under custom VWS URLs.
    """

    def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None:
        """
        It is possible to use add a target to a database under a custom
        VWS URL.
        """
        base_vws_url = 'http://example.com'
        with MockVWS(base_vws_url=base_vws_url) as mock:
            client = VWS(
                server_access_key=mock.server_access_key,
                server_secret_key=mock.server_secret_key,
                base_vws_url=base_vws_url,
            )
            client.add_target(
                name='x',
                width=1,
                image=high_quality_image,
            )
```

subject: Add test for custom base URL
message: Add test for custom base URL
lang: Python
license: mit
repos: adamtheturtle/vws-python,adamtheturtle/vws-python
""" Tests for helper function for adding a target to a Vuforia database. """ import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) Add test for custom base URL
""" Tests for helper function for adding a target to a Vuforia database. """ import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
<commit_before>""" Tests for helper function for adding a target to a Vuforia database. """ import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) <commit_msg>Add test for custom base URL<commit_after>
""" Tests for helper function for adding a target to a Vuforia database. """ import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
""" Tests for helper function for adding a target to a Vuforia database. """ import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) Add test for custom base URL""" Tests for helper function for adding a target to a Vuforia database. """ import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
<commit_before>""" Tests for helper function for adding a target to a Vuforia database. """ import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) <commit_msg>Add test for custom base URL<commit_after>""" Tests for helper function for adding a target to a Vuforia database. """ import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
commit: b6711f27146279ee419143b560cf32d3b3dfc80c
old_file: tools/conan/conanfile.py
new_file: tools/conan/conanfile.py
old_contents:

```python
from conans import ConanFile, CMake, tools

class VarconfConan(ConanFile):
    name = "varconf"
    version = "1.0.3"
    license = "GPL-2.0+"
    author = "Erik Ogenvik <erik@ogenvik.org>"
    homepage = "https://www.worldforge.org"
    url = "https://github.com/worldforge/varconf"
    description = "Configuration library for the Worldforge system."
    topics = ("mmorpg", "worldforge")
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [False, True], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    generators = "cmake"
    requires = ["sigc++/2.10.0@worldforge/stable"]
    scm = {
        "type": "git",
        "url": "https://github.com/worldforge/varconf.git",
        "revision": "auto"
    }

    def build(self):
        cmake = CMake(self)
        cmake.configure(source_folder=".")
        cmake.build()
        cmake.install()

    def package_info(self):
        self.cpp_info.libs = tools.collect_libs(self)
        self.cpp_info.includedirs = ["include/varconf-1.0"]

    def package(self):
        pass
```

new_contents:

```python
from conans import ConanFile, CMake, tools

class VarconfConan(ConanFile):
    name = "varconf"
    version = "1.0.3"
    license = "GPL-2.0+"
    author = "Erik Ogenvik <erik@ogenvik.org>"
    homepage = "https://www.worldforge.org"
    url = "https://github.com/worldforge/varconf"
    description = "Configuration library for the Worldforge system."
    topics = ("mmorpg", "worldforge")
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [False, True], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    generators = "cmake"
    requires = ["sigc++/2.10.0@worldforge/stable"]
    scm = {
        "type": "git",
        "url": "https://github.com/worldforge/varconf.git",
        "revision": "auto"
    }

    def imports(self):
        self.copy("*.dll", "bin", "bin")

    def build(self):
        cmake = CMake(self)
        cmake.configure(source_folder=".")
        cmake.build()
        cmake.install()

    def package_info(self):
        self.cpp_info.libs = tools.collect_libs(self)
        self.cpp_info.includedirs = ["include/varconf-1.0"]

    def package(self):
        pass
```

subject: Include binaries when importing (for Windows).
message: Include binaries when importing (for Windows).
lang: Python
license: lgpl-2.1
repos: worldforge/varconf,worldforge/varconf,worldforge/varconf,worldforge/varconf
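The `imports()` hook the commit adds runs before a local build and copies files out of the dependency packages; its three positional arguments read as (pattern, destination in the local folder, source folder inside each dependency). Here that means pulling every `*.dll` (for example the shared sigc++ library declared in `requires`) next to the freshly built binaries so Windows can resolve them at run time. Annotated form:

```python
# The hook added by this commit, annotated (Conan 1.x API).
def imports(self):
    # pattern   dst (local build folder)   src (inside each dependency)
    self.copy("*.dll", "bin", "bin")
```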
commit: 4307fa24a27a2c623836a7518e3aceb4546abcf6
old_file: scholrroles/behaviour.py
new_file: scholrroles/behaviour.py
old_contents:

```python
from collections import defaultdict

from .utils import get_value_from_accessor

class RoleBehaviour(object):
    ids = []
    object_accessors = {}

    def __init__(self, user, request):
        self.user = user
        self.request = request

    def has_role(self):
        return False

    def has_role_for(self, obj):
        if self.has_role() and self.ids and obj:
            if self.role in obj.role_accessors:
                return get_value_from_accessor(obj, obj.role_accessors[self.role]) in self.ids
        return True

    def can_apply_permission(self, obj, perm):
        method = 'has_{}_{}_permission'.format(self.role, perm.name)
        if hasattr(obj, method):
            function = getattr(obj, method)
            if callable(function):
                return function(self.user)
        return True

class UserBehaviour(RoleBehaviour):
    role= 'user'

    def has_role(self):
        return True

def role_behaviour_factory():
    return RoleBehaviour

class RoleBehaviourRegistry(object):
    _registry = defaultdict(role_behaviour_factory)

    def register(self, cls):
        self._registry[cls.role] = cls

    def get_role(self, role):
        return self._registry[role]

registry = RoleBehaviourRegistry()
```

new_contents:

```python
from collections import defaultdict

from .utils import get_value_from_accessor

class RoleBehaviour(object):
    ids = []
    object_accessors = {}

    def __init__(self, user, request):
        self.user = user
        self.request = request

    def has_role(self):
        return False

    def has_role_for(self, obj):
        if self.has_role() and self.ids and obj:
            if self.role in obj.role_accessors:
                return get_value_from_accessor(obj, obj.role_accessors[self.role]) in self.ids
        return True

    def can_apply_permission(self, obj, perm):
        method = 'has_{}_{}_permission'.format(self.role, perm.name)
        print method, hasattr(obj, method), getattr(obj, method), callable(getattr(obj, method))
        if hasattr(obj, method):
            function = getattr(obj, method)
            if callable(function):
                return function(self.user)
        return True

class UserBehaviour(RoleBehaviour):
    role= 'user'

    def has_role(self):
        return True

def role_behaviour_factory():
    return RoleBehaviour

class RoleBehaviourRegistry(object):
    _registry = defaultdict(role_behaviour_factory)

    def register(self, cls):
        self._registry[cls.role] = cls

    def get_role(self, role):
        return self._registry[role]

registry = RoleBehaviourRegistry()
```

subject: Validate Model function to allow permission
message: Validate Model function to allow permission
lang: Python
license: bsd-3-clause
repos: Scholr/scholr-roles
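Two details of the added line are worth noting: it is Python 2 print-statement syntax, so this codebase predates Python 3, and it calls `getattr(obj, method)` with no default before the `hasattr` guard runs, so the debug line itself raises `AttributeError` for any model that lacks the hook. The convention the method implements, models opting in by defining `has_<role>_<perm>_permission`, looks like this in use (model, role, and rule below are all hypothetical):

```python
# Hypothetical model opting in to the dynamic permission hook that
# can_apply_permission() looks up by name.
class User(object):
    is_staff = True

class Report(object):
    def has_manager_delete_permission(self, user):
        return user.is_staff  # hypothetical rule

method = 'has_{}_{}_permission'.format('manager', 'delete')
check = getattr(Report(), method, None)   # a default avoids AttributeError
allowed = check(User()) if callable(check) else True
print(allowed)  # True
```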
commit: bdd842f55f3a234fefee4cd2a701fa23e07c3789
old_file: scikits/umfpack/setup.py
new_file: scikits/umfpack/setup.py
old_contents:

```python
#!/usr/bin/env python
# 05.12.2005, c
from __future__ import division, print_function, absolute_import

def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)

    ## The following addition is needed when linking against a umfpack built
    ## from the latest SparseSuite. Not (strictly) needed when linking against
    ## the version in the ubuntu repositories.
    umf_info['libraries'].insert(0, 'rt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    config.add_extension('__umfpack',
                         sources=[umfpack_i],
                         depends=['umfpack.i'],
                         **build_info)

    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
```

new_contents:

```python
#!/usr/bin/env python
# 05.12.2005, c
from __future__ import division, print_function, absolute_import

import sys

def configuration(parent_package='',top_path=None):
    from numpy.distutils.misc_util import Configuration
    from numpy.distutils.system_info import get_info, dict_append

    config = Configuration('umfpack', parent_package, top_path)
    config.add_data_dir('tests')

    umf_info = get_info('umfpack', notfound_action=1)

    ## The following addition is needed when linking against a umfpack built
    ## from the latest SparseSuite. Not (strictly) needed when linking against
    ## the version in the ubuntu repositories.
    if not sys.platform == 'darwin':
        umf_info['libraries'].insert(0, 'rt')

    umfpack_i_file = config.paths('umfpack.i')[0]

    def umfpack_i(ext, build_dir):
        if umf_info:
            return umfpack_i_file

    blas_info = get_info('blas_opt')
    build_info = {}
    dict_append(build_info, **umf_info)
    dict_append(build_info, **blas_info)

    config.add_extension('__umfpack',
                         sources=[umfpack_i],
                         depends=['umfpack.i'],
                         **build_info)

    return config

if __name__ == "__main__":
    from numpy.distutils.core import setup
    setup(**configuration(top_path='').todict())
```

subject: Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
message: Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
lang: Python
license: bsd-3-clause
repos: scikit-umfpack/scikit-umfpack,scikit-umfpack/scikit-umfpack,rc/scikit-umfpack-rc,rc/scikit-umfpack,rc/scikit-umfpack,rc/scikit-umfpack-rc
<commit_before>#!/usr/bin/env python # 05.12.2005, c from __future__ import division, print_function, absolute_import def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info, dict_append config = Configuration('umfpack', parent_package, top_path) config.add_data_dir('tests') umf_info = get_info('umfpack', notfound_action=1) ## The following addition is needed when linking against a umfpack built ## from the latest SparseSuite. Not (strictly) needed when linking against ## the version in the ubuntu repositories. umf_info['libraries'].insert(0, 'rt') umfpack_i_file = config.paths('umfpack.i')[0] def umfpack_i(ext, build_dir): if umf_info: return umfpack_i_file blas_info = get_info('blas_opt') build_info = {} dict_append(build_info, **umf_info) dict_append(build_info, **blas_info) config.add_extension('__umfpack', sources=[umfpack_i], depends=['umfpack.i'], **build_info) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(**configuration(top_path='').todict()) <commit_msg>Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.<commit_after>#!/usr/bin/env python # 05.12.2005, c from __future__ import division, print_function, absolute_import import sys def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info, dict_append config = Configuration('umfpack', parent_package, top_path) config.add_data_dir('tests') umf_info = get_info('umfpack', notfound_action=1) ## The following addition is needed when linking against a umfpack built ## from the latest SparseSuite. Not (strictly) needed when linking against ## the version in the ubuntu repositories. if not sys.platform == 'darwin': umf_info['libraries'].insert(0, 'rt') umfpack_i_file = config.paths('umfpack.i')[0] def umfpack_i(ext, build_dir): if umf_info: return umfpack_i_file blas_info = get_info('blas_opt') build_info = {} dict_append(build_info, **umf_info) dict_append(build_info, **blas_info) config.add_extension('__umfpack', sources=[umfpack_i], depends=['umfpack.i'], **build_info) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
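The record above captures a common build-script pattern: librt exists on Linux (it supplies POSIX realtime symbols such as clock_gettime) but not on macOS, where those symbols live in libSystem, so the link line must be assembled per platform. A minimal sketch of the same guard, assuming a stand-in library list in place of the record's umf_info dict; note that sys.platform != 'darwin' is the more idiomatic spelling of the check used in the commit:

import sys

# Stand-in for the record's umf_info['libraries']; 'rt' is Linux-only.
libraries = ['umfpack', 'amd']

if sys.platform != 'darwin':
    # On glibc systems librt must be linked explicitly; macOS has no librt.
    libraries.insert(0, 'rt')

print(libraries)  # ['rt', 'umfpack', 'amd'] on Linux; ['umfpack', 'amd'] on macOS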
6c4a2d6f80d7ee5f9c06c3d678bb86661c94a793
tools/np_suppressions.py
tools/np_suppressions.py
suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ]
suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
Add documentation on one assertion, convert REs to raw strings.
Add documentation on one assertion, convert REs to raw strings.
Python
bsd-3-clause
numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor
suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ] Add documentation on one assertion, convert RE's to raw strings.
suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
<commit_before>suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ] <commit_msg>Add documentation on one assertion, convert RE's to raw strings.<commit_after>
suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ] Add documentation on one assertion, convert RE's to raw strings.suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
<commit_before>suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ] <commit_msg>Add documentation on one assertion, convert RE's to raw strings.<commit_after>suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
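The conversion above matters because "\." in a plain string only works by accident: \. is not a recognised Python escape, so the backslash survives, but unrecognised escapes are deprecated and any pattern containing sequences like \n or \t would silently change meaning. Raw strings hand the backslash to the regex engine untouched. A small self-contained illustration (the matched path is made up):

import re

plain = ".*/multiarray/mapping\."    # '\.' is not a real escape, so this *happens* to work
raw   = r".*/multiarray/mapping\."   # raw string: the backslash is passed through by construction

assert plain == raw                  # identical here -- the raw form is simply the safe habit
assert re.match(raw, "build/multiarray/mapping.c")

# The two forms diverge as soon as a real escape appears:
assert "\t" != r"\t"                 # tab character vs. backslash followed by 't'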
dcba8b90b84506a7325f8e576d10ccb8d2e9a415
setuptools/py24compat.py
setuptools/py24compat.py
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
Add a shim for Python 2.4 compatibility with hashlib
Add a shim for Python 2.4 compatibility with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x Add a shim for python 2.4 compatability with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
<commit_before>""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x <commit_msg>Add a shim for python 2.4 compatability with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559<commit_after>
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x Add a shim for python 2.4 compatability with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
<commit_before>""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x <commit_msg>Add a shim for python 2.4 compatability with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559<commit_after>""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
03977d24d5862373a881b7098bc78adc30fe8256
make_src_bem.py
make_src_bem.py
from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
from __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
Change to make BEM solution from mne-C
Change to make BEM solution from mne-C
Python
bsd-3-clause
MadsJensen/RP_scripts,MadsJensen/RP_scripts,MadsJensen/RP_scripts
from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject) Change to make BEM solution from mne-C
from __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
<commit_before>from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject) <commit_msg>Change to make BEM solution from mne-C<commit_after>
from __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject) Change to make BEM solution from mne-Cfrom __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
<commit_before>from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject) <commit_msg>Change to make BEM solution from mne-C<commit_after>from __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
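The change above trades mne.make_bem_solution for a shell-out to the MNE-C binary, routed through a site-specific cluster submit script. Independent of that wrapper, the portable shape of such a call is to pass the program and its arguments as a list and to fail loudly on a non-zero exit status — a sketch with a placeholder subject ID (check_call is used here instead of the record's bare call so failures are not silently dropped):

import subprocess

subject = "sub01"   # placeholder; the record's script takes this from sys.argv[1]
cmd = ["mne_setup_forward_model", "--subject", subject, "--surf", "--ico", "-6"]

# Raises CalledProcessError if the MNE-C tool exits non-zero.
subprocess.check_call(cmd)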
27c614b30eda339ca0c61f35e498be6456f2280f
scoring/__init__.py
scoring/__init__.py
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
Make scorer accept instances of model and desc. gen.
Make scorer accept instances of model and desc. gen.
Python
bsd-3-clause
mwojcikowski/opendrugdiscovery
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb')) Make scorer accept instances of model and desc. gen.
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
<commit_before>import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb')) <commit_msg>Make scorer accept instances of model and desc. gen.<commit_after>
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb')) Make scorer accept instances of model and desc. gen.import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
<commit_before>import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb')) <commit_msg>Make scorer accept instances of model and desc. gen.<commit_after>import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
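The refactor above is plain dependency injection: instead of receiving classes plus parallel option dicts (model_opts, desc_opts) and constructing them itself, the scorer now receives ready-built instances, so arbitrary constructor signatures work without the wrapper needing to know about them. (Incidentally, fit in both versions still references an undefined descs where self.train_descs was computed — that bug is outside this commit's scope.) A toy before/after with stand-in classes:

class Model:                                   # stand-in for an sklearn-style estimator
    def __init__(self, depth=3):
        self.depth = depth

# Before: construction happens inside the wrapper, so every option
# must be funnelled through an opts dict.
def scorer_old(model_cls, model_opts=None):
    return model_cls(**(model_opts or {}))

# After: the caller builds the object; the wrapper just keeps a reference.
def scorer_new(model_instance):
    return model_instance

assert scorer_old(Model, {"depth": 5}).depth == scorer_new(Model(depth=5)).depth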
c9b6cec70b2162b98d836f8100bc039f19fe23cb
googleapiclient/__init__.py
googleapiclient/__init__.py
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.0"
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.1"
Bump version number to 1.4.1
Bump version number to 1.4.1
Python
apache-2.0
googleapis/google-api-python-client,googleapis/google-api-python-client
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.0" Bump version number to 1.4.1
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.1"
<commit_before># Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.0" <commit_msg>Bump version number to 1.4.1<commit_after>
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.1"
# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.0" Bump version number to 1.4.1# Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.1"
<commit_before># Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.0" <commit_msg>Bump version number to 1.4.1<commit_after># Copyright 2014 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = "1.4.1"
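A release commit like the one above works because __version__ is a plain string literal that tooling can read without importing the package's dependencies. One common (if simplistic) way a setup script or release check extracts it is a one-line regex over the file — a sketch, with the path assumed from the record and naturally dependent on the package layout:

import re

with open("googleapiclient/__init__.py") as f:
    version = re.search(r'__version__ = "([^"]+)"', f.read()).group(1)

print(version)   # "1.4.1" after this commit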
3963037eb008d077e439029a33b60516cde399c6
massa/config.py
massa/config.py
# -*- coding: utf-8 -*- import logging class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = logging.DEBUG
# -*- coding: utf-8 -*- class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = 10
Define logging level with a numeric value.
Define logging level with a numeric value.
Python
mit
jaapverloop/massa
# -*- coding: utf-8 -*- import logging class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = logging.DEBUG Define logging level with a numeric value.
# -*- coding: utf-8 -*- class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = 10
<commit_before># -*- coding: utf-8 -*- import logging class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = logging.DEBUG <commit_msg>Define logging level with a numeric value.<commit_after>
# -*- coding: utf-8 -*- class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = 10
# -*- coding: utf-8 -*- import logging class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = logging.DEBUG Define logging level with a numeric value.# -*- coding: utf-8 -*- class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = 10
<commit_before># -*- coding: utf-8 -*- import logging class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = logging.DEBUG <commit_msg>Define logging level with a numeric value.<commit_after># -*- coding: utf-8 -*- class Production(object): DEBUG = False TESTING = False SECRET_KEY = '##CHANGEME##' SQLALCHEMY_DATABASE_URI = 'postgresql://massa:secret@localhost/massa' SQLALCHEMY_ECHO = False class Development(Production): DEBUG = True LOGGER_LEVEL = 10
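The substitution above is safe because the stdlib level names are fixed integer aliases — DEBUG is 10, INFO 20, WARNING 30, ERROR 40, CRITICAL 50 — so LOGGER_LEVEL = 10 means exactly what logging.DEBUG meant, at the cost of a magic number in the settings module. A quick confirmation:

import logging

assert logging.DEBUG == 10 and logging.INFO == 20
assert logging.getLevelName(10) == "DEBUG"

logging.basicConfig(level=10)            # identical to level=logging.DEBUG
logging.debug("visible, because this record emits at the configured level 10")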
b5477239d7b1ee9e73265b023355e8e83826ec49
scrapy_rss/items.py
scrapy_rss/items.py
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
Fix RssItem when each scraped item is an instance of RssItem
Fix RssItem when each scraped item is an instance of RssItem
Python
bsd-3-clause
woxcab/scrapy_rss
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem() Fix RssItem when each scraped item is instance of RssItem
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
<commit_before># -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem() <commit_msg>Fix RssItem when each scraped item is instance of RssItem<commit_after>
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem() Fix RssItem when each scraped item is instance of RssItem# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
<commit_before># -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem() <commit_msg>Fix RssItem when each scraped item is instance of RssItem<commit_after># -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
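The one-line fix above matters because Scrapy decides what counts as a scraped item via an isinstance check against BaseItem; a plain class that merely looks like an item never reaches the item pipelines. The mechanics reduce to this minimal sketch (framework internals condensed to a single type test):

class BaseItem(object):          # stand-in for scrapy.item.BaseItem
    pass

class PlainRssItem:              # before the fix: not recognised as an item
    pass

class RssItem(BaseItem):         # after the fix: passes the framework's test
    pass

def is_item(obj):
    # Roughly what the engine does with objects yielded from a spider.
    return isinstance(obj, BaseItem)

assert not is_item(PlainRssItem())
assert is_item(RssItem())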
c3937702d8d9fa4ef7661a555876ad69654f88fd
scenarios/passtr/bob_cfg.py
scenarios/passtr/bob_cfg.py
from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig class AUTH_CREDS(AUTH_CREDS_orig): enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None) realm = 'VoIPTests.NET' def __init__(self): AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
Enable all auth algorithms that might be emitted by alice.
Enable all auth algorithms that might be emitted by alice.
Python
bsd-2-clause
sippy/voiptests,sippy/voiptests
Enable all auth algorithms that might be emitted by the alice.
from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig class AUTH_CREDS(AUTH_CREDS_orig): enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None) realm = 'VoIPTests.NET' def __init__(self): AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
<commit_before><commit_msg>Enable all auth algorithms that might be emitted by the alice.<commit_after>
from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig class AUTH_CREDS(AUTH_CREDS_orig): enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None) realm = 'VoIPTests.NET' def __init__(self): AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
Enable all auth algorithms that might be emitted by the alice.from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig class AUTH_CREDS(AUTH_CREDS_orig): enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None) realm = 'VoIPTests.NET' def __init__(self): AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
<commit_before><commit_msg>Enable all auth algorithms that might be emitted by the alice.<commit_after>from lib.test_config import AUTH_CREDS as AUTH_CREDS_orig class AUTH_CREDS(AUTH_CREDS_orig): enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None) realm = 'VoIPTests.NET' def __init__(self): AUTH_CREDS_orig.__init__(self, 'mightyuser', 's3cr3tpAssw0Rd')
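The bob_cfg record illustrates configuration-by-subclassing: the base credentials class reads settings from class attributes, so a subclass widens the accepted algorithms just by shadowing enalgs. A generic sketch of the pattern, with illustrative names rather than the real AUTH_CREDS internals:

class BaseCreds(object):
    enalgs = ('MD5',)

    def pick_algorithm(self, offered):
        # return the first enabled algorithm the peer also offers
        for alg in self.enalgs:
            if alg in offered:
                return alg

class AllCreds(BaseCreds):
    enalgs = ('SHA-256-sess', 'SHA-256', 'MD5-sess', 'MD5', None)

print(AllCreds().pick_algorithm(['SHA-256', 'MD5']))  # SHA-256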
ee69971832120f4492e8f41abfbcb9c87e398d6a
DeepFried2/utils.py
DeepFried2/utils.py
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) )
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
Add utility to save/load parameters, i.e. models.
Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.
Python
mit
yobibyte/DeepFried2,lucasb-eyer/DeepFried2,elPistolero/DeepFried2,Pandoro/DeepFried2
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
<commit_before>import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) <commit_msg>Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.<commit_after>
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
<commit_before>import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) <commit_msg>Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.<commit_after>import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
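Taken together, the three helpers added in this commit give a minimal checkpoint workflow. A hypothetical round trip, assuming net is any DeepFried2 module exposing parameters() as above:

n = count_params(net)            # total scalar weights, e.g. for reporting
save_params(net, 'model.npz')    # dump current parameter values to disk
load_params(net, 'model.npz')    # restore them, relying on parameter order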
29e4dc4b11f691a8aaf4b987e6a9e74214f19365
journal.py
journal.py
# -*- coding: utf-8 -*- from flask import Flask import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
# -*- coding: utf-8 -*- from flask import Flask from flask import g import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() def get_database_connection(): db = getattr(g, 'db', None) if db is None: g.db = db = connect_db() return db @app.teardown_request def teardown_request(exception): db = getattr(g, 'db', None) if db is not None: if exception and isinstance(exception, psycopg2.Error): # if there was a problem with the database, rollback any # existing transaction db.rollback() else: # otherwise, commit db.commit() db.close() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
Add flask.g and connection/teardown operators.
Add flask.g and connection/teardown operators.
Python
mit
lfritts/learning_journal,lfritts/learning_journal
# -*- coding: utf-8 -*- from flask import Flask import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True) Add flask.g and connection/teardown operators.
# -*- coding: utf-8 -*- from flask import Flask from flask import g import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() def get_database_connection(): db = getattr(g, 'db', None) if db is None: g.db = db = connect_db() return db @app.teardown_request def teardown_request(exception): db = getattr(g, 'db', None) if db is not None: if exception and isinstance(exception, psycopg2.Error): # if there was a problem with the database, rollback any # existing transaction db.rollback() else: # otherwise, commit db.commit() db.close() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
<commit_before># -*- coding: utf-8 -*- from flask import Flask import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True) <commit_msg>Add flask.g and connection/teardown operators.<commit_after>
# -*- coding: utf-8 -*- from flask import Flask from flask import g import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() def get_database_connection(): db = getattr(g, 'db', None) if db is None: g.db = db = connect_db() return db @app.teardown_request def teardown_request(exception): db = getattr(g, 'db', None) if db is not None: if exception and isinstance(exception, psycopg2.Error): # if there was a problem with the database, rollback any # existing transaction db.rollback() else: # otherwise, commit db.commit() db.close() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
# -*- coding: utf-8 -*- from flask import Flask import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True) Add flask.g and connection/teardown operators.# -*- coding: utf-8 -*- from flask import Flask from flask import g import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() def get_database_connection(): db = getattr(g, 'db', None) if db is None: g.db = db = connect_db() return db @app.teardown_request def teardown_request(exception): db = getattr(g, 'db', None) if db is not None: if exception and isinstance(exception, psycopg2.Error): # if there was a problem with the database, rollback any # existing transaction db.rollback() else: # otherwise, commit db.commit() db.close() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
<commit_before># -*- coding: utf-8 -*- from flask import Flask import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True) <commit_msg>Add flask.g and connection/teardown operators.<commit_after># -*- coding: utf-8 -*- from flask import Flask from flask import g import os import psycopg2 from contextlib import closing DB_SCHEMA = """ DROP TABLE IF EXISTS entries; CREATE TABLE entries ( id serial PRIMARY KEY, title VARCHAR (127) NOT NULL, text TEXT NOT NULL, created TIMESTAMP NOT NULL ) """ app = Flask(__name__) app.config['DATABASE'] = os.environ.get( 'DATABASE_URL', 'dbname=learning_journal' ) def connect_db(): """Return a connection to the configured database""" return psycopg2.connect(app.config['DATABASE']) def init_db(): """Initialize the database using DB_SCHEMA WARNING: executing this function will drop existing tables. """ with closing(connect_db()) as db: db.cursor().execute(DB_SCHEMA) db.commit() def get_database_connection(): db = getattr(g, 'db', None) if db is None: g.db = db = connect_db() return db @app.teardown_request def teardown_request(exception): db = getattr(g, 'db', None) if db is not None: if exception and isinstance(exception, psycopg2.Error): # if there was a problem with the database, rollback any # existing transaction db.rollback() else: # otherwise, commit db.commit() db.close() @app.route('/') def hello(): return u'Hello world!' if __name__ == '__main__': app.run(debug=True)
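With the helpers from this commit in place, a view never manages the connection directly: it asks flask.g for one, and teardown_request commits or rolls back. A sketch of such a view (the route and query are illustrative, not from the journal app):

@app.route('/entries')
def list_entries():
    cur = get_database_connection().cursor()
    cur.execute("SELECT title FROM entries ORDER BY created DESC")
    # no commit/close here; teardown_request settles the transaction
    return u', '.join(row[0] for row in cur.fetchall())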
9e7e61256eb2ca2b4f4f19ce5b926709a593a28b
vispy/app/tests/test_interactive.py
vispy/app/tests/test_interactive.py
from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
Fix for flake8 checks on new test file.
Fix for flake8 checks on new test file.
Python
bsd-3-clause
jay3sh/vispy,ghisvail/vispy,hronoses/vispy,kkuunnddaannkk/vispy,michaelaye/vispy,dchilds7/Deysha-Star-Formation,sh4wn/vispy,QuLogic/vispy,srinathv/vispy,sh4wn/vispy,QuLogic/vispy,julienr/vispy,kkuunnddaannkk/vispy,drufat/vispy,Eric89GXL/vispy,jay3sh/vispy,jdreaver/vispy,RebeccaWPerry/vispy,RebeccaWPerry/vispy,jay3sh/vispy,sbtlaarzc/vispy,Eric89GXL/vispy,srinathv/vispy,hronoses/vispy,drufat/vispy,julienr/vispy,Eric89GXL/vispy,kkuunnddaannkk/vispy,inclement/vispy,sh4wn/vispy,jdreaver/vispy,inclement/vispy,bollu/vispy,bollu/vispy,RebeccaWPerry/vispy,dchilds7/Deysha-Star-Formation,sbtlaarzc/vispy,inclement/vispy,julienr/vispy,michaelaye/vispy,sbtlaarzc/vispy,jdreaver/vispy,ghisvail/vispy,michaelaye/vispy,QuLogic/vispy,ghisvail/vispy,srinathv/vispy,hronoses/vispy,dchilds7/Deysha-Star-Formation,drufat/vispy,bollu/vispy
from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()Fix for flake8 checks on new test file.
from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
<commit_before>from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()<commit_msg>Fix for flake8 checks on new test file.<commit_after>
from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()Fix for flake8 checks on new test file.from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
<commit_before>from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()<commit_msg>Fix for flake8 checks on new test file.<commit_after>from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
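The change here is exactly what flake8 asks for: assert_raises was imported but never used, which the F401 check reports. Running flake8 over the old file would print something roughly like:

# test_interactive.py:1:1: F401 'nose.tools.assert_raises' imported but unused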
efc0f438e894fa21ce32665ec26c19751ec2ce10
ureport_project/wsgi_app.py
ureport_project/wsgi_app.py
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler()
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
Apply linesman profiler to the wsgi app
Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server
Python
bsd-3-clause
unicefuganda/ureport,unicefuganda/ureport,unicefuganda/ureport,mbanje/ureport_uganda,mbanje/ureport_uganda
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler() Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
<commit_before># wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler() <commit_msg>Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server<commit_after>
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler() Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
<commit_before># wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler() <commit_msg>Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server<commit_after># wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
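make_linesman_middleware follows the standard WSGI middleware shape: a factory receives the inner application and returns a callable with the same (environ, start_response) signature. A stripped-down sketch of that shape, independent of linesman:

def make_noop_middleware(app):
    def middleware(environ, start_response):
        # a real middleware would profile or inspect here, then delegate
        return app(environ, start_response)
    return middleware

application = make_noop_middleware(application)  # same wrapping as above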
5ba9888d267d663fb0ab0dfbfd9346dc20f4c0c1
test/test_turtle_serialize.py
test/test_turtle_serialize.py
import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
from rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
Test boolean list serialization in Turtle
Test boolean list serialization in Turtle
Python
bsd-3-clause
RDFLib/rdflib,ssssam/rdflib,armandobs14/rdflib,yingerj/rdflib,RDFLib/rdflib,ssssam/rdflib,ssssam/rdflib,avorio/rdflib,marma/rdflib,marma/rdflib,RDFLib/rdflib,ssssam/rdflib,dbs/rdflib,armandobs14/rdflib,dbs/rdflib,dbs/rdflib,marma/rdflib,avorio/rdflib,marma/rdflib,yingerj/rdflib,RDFLib/rdflib,yingerj/rdflib,dbs/rdflib,armandobs14/rdflib,avorio/rdflib,armandobs14/rdflib,yingerj/rdflib,avorio/rdflib
import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0]) Test boolean list serialization in Turtle
from rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
<commit_before>import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0]) <commit_msg>Test boolean list serialization in Turtle<commit_after>
from rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0]) Test boolean list serialization in Turtlefrom rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
<commit_before>import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0]) <commit_msg>Test boolean list serialization in Turtle<commit_after>from rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
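The hand-written first/rest triples in the test can equivalently be produced with the Collection helper the test already uses for reading; a sketch of the shorter construction:

g = Graph()
head = BNode()
Collection(g, head, [Literal(True), Literal(False), Literal(True)])
g.add((URIRef("http://localhost/user"),
       URIRef("http://localhost/vocab#hasList"),
       head))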
8126ca21bcf8da551906eff348c92cb71fe79e6e
readthedocs/doc_builder/base.py
readthedocs/doc_builder/base.py
import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
Call wraps on the restoring_chdir decorator.
Call wraps on the restoring_chdir decorator.
Python
mit
alex/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,tddv/readthedocs.org,dirn/readthedocs.org,takluyver/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,kdkeyser/readthedocs.org,asampat3090/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,gjtorikian/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,atsuyim/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,Carreau/readthedocs.org,johncosta/private-readthedocs.org,Carreau/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,wanghaven/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,michaelmcandrew/readthedocs.org,GovReady/readthedocs.org,KamranMackey/readthedocs.org,nikolas/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,agjohnson/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,wanghaven/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,emawind84/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,d0ugal/readthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,johncosta/private-readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,nyergler/pythonslides,michaelmcandrew/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,safwanrahman/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,emawind84/readthedocs.org,raven47git/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,cgourlay/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,fujita-shintaro/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,asampat3090/readthedocs.org,hach-que/readthedocs.org,michaelmcandrew/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs
.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,johncosta/private-readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,nikolas/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,Carreau/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,fujita-shintaro/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,ojii/readthedocs.org,jerel/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,VishvajitP/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,sunnyzwh/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,nyergler/pythonslides,sils1297/readthedocs.org,kdkeyser/readthedocs.org,attakei/readthedocs-oauth,ojii/readthedocs.org,alex/readthedocs.org,clarkperkins/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,nyergler/pythonslides,raven47git/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,royalwang/readthedocs.org,stevepiercy/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org,nyergler/pythonslides,kenwang76/readthedocs.org,davidfischer/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,SteveViss/readthedocs.org,soulshake/readthedocs.org,techtonik/readthedocs.org
import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError Call wraps on the restoring_chdir decorator.
import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
<commit_before>import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError <commit_msg>Call wraps on the restoring_chdir decorator.<commit_after>
import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError Call wraps on the restoring_chdir decorator.import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
<commit_before>import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError <commit_msg>Call wraps on the restoring_chdir decorator.<commit_after>import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
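A short aside on the technique in the record above: without functools.wraps, the inner wrapper replaces the decorated function's metadata (__name__, __doc__, __module__), which confuses debuggers, documentation tools, and anything that dispatches on function names. A minimal runnable sketch of the difference — the greet functions below are invented for illustration and are not from the repository in the record:

import functools

def plain(fn):
    def wrapper(*args, **kw):
        return fn(*args, **kw)
    return wrapper

def wrapped(fn):
    @functools.wraps(fn)  # copies __name__, __doc__, __module__ onto wrapper
    def wrapper(*args, **kw):
        return fn(*args, **kw)
    return wrapper

@plain
def greet():
    """Say hello."""

@wrapped
def greet_kept():
    """Say hello."""

print(greet.__name__, greet.__doc__)            # wrapper None      -> metadata lost
print(greet_kept.__name__, greet_kept.__doc__)  # greet_kept Say hello.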
e229d8f731b8b34294127702b1333eefec6f95bc
server/main.py
server/main.py
from flask import Flask import yaml app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
from flask import Flask import yaml import sql import psycopg2 app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
Add SQL imports to python
Add SQL imports to python
Python
mit
aradler/Card-lockout,aradler/Card-lockout,aradler/Card-lockout
from flask import Flask import yaml app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True Add SQL imports to python
from flask import Flask import yaml import sql import psycopg2 app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
<commit_before>from flask import Flask import yaml app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True <commit_msg>Add SQL imports to python<commit_after>
from flask import Flask import yaml import sql import psycopg2 app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
from flask import Flask import yaml app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True Add SQL imports to pythonfrom flask import Flask import yaml import sql import psycopg2 app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
<commit_before>from flask import Flask import yaml app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True <commit_msg>Add SQL imports to python<commit_after>from flask import Flask import yaml import sql import psycopg2 app = Flask(__name__) @app.route("/") def hello(): return "Hello World!" if __name__ == '__main__': # read and return a yaml file (called 'config.yaml' by default) and give it # back as a dictionary with open( 'config.yaml' ) as f: config = yaml.load( f ) app.run( host='0.0.0.0', port=config['port'] ) app.debug = True
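The record above only adds the imports; here is a hedged sketch of the follow-up step they enable — loading the YAML config and opening a PostgreSQL connection with psycopg2. The db_* keys are invented for illustration (the record's config.yaml only demonstrably contains 'port'), the local `sql` module from the diff is not shown anywhere in the record, and yaml.safe_load is preferred over yaml.load, which can execute arbitrary tags on untrusted input:

import yaml
import psycopg2

with open('config.yaml') as f:
    config = yaml.safe_load(f)  # safe_load: plain data only, no arbitrary YAML tags

# Hypothetical connection settings; only 'port' is known to exist in the record's config.
conn = psycopg2.connect(
    host=config.get('db_host', 'localhost'),
    dbname=config.get('db_name', 'postgres'),
    user=config.get('db_user', 'postgres'),
)
try:
    with conn.cursor() as cur:
        cur.execute('SELECT version()')
        print(cur.fetchone()[0])
finally:
    conn.close()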
9428a6181cd33b5847dd1a348a651a4c794092ab
salt/modules/logmod.py
salt/modules/logmod.py
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt.log.info(var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt["log.info"](var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
Update jinja example to recommended syntax
Update jinja example to recommended syntax
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt.log.info(var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True Update jinja example to recommended syntax
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt["log.info"](var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
<commit_before>""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt.log.info(var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True <commit_msg>Update jinja example to recommended syntax<commit_after>
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt["log.info"](var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt.log.info(var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True Update jinja example to recommended syntax""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt["log.info"](var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
<commit_before>""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt.log.info(var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True <commit_msg>Update jinja example to recommended syntax<commit_after>""" On-demand logging ================= .. versionadded:: 2017.7.0 The sole purpose of this module is logging messages in the (proxy) minion. It comes very handy when debugging complex Jinja templates, for example: .. code-block:: jinja {%- for var in range(10) %} {%- do salt["log.info"](var) -%} {%- endfor %} CLI Example: .. code-block:: bash salt '*' log.error "Please don't do that, this module is not for CLI use!" """ import logging log = logging.getLogger(__name__) __virtualname__ = "log" __proxyenabled__ = ["*"] def __virtual__(): return __virtualname__ def debug(message): """ Log message at level DEBUG. """ log.debug(message) return True def info(message): """ Log message at level INFO. """ log.info(message) return True def warning(message): """ Log message at level WARNING. """ log.warning(message) return True def error(message): """ Log message at level ERROR. """ log.error(message) return True def critical(message): """ Log message at level CRITICAL. """ log.critical(message) return True def exception(message): """ Log message at level EXCEPTION. """ log.exception(message) return True
76bf8966a25932822fca1c94586fccfa096ee02b
tests/misc/test_base_model.py
tests/misc/test_base_model.py
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
Change base model string representation
Change base model string representation
Python
agpl-3.0
cgwire/zou
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass Change base model string representation
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
<commit_before># -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass <commit_msg>Change base model string representation<commit_after>
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass Change base model string representation# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
<commit_before># -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass <commit_msg>Change base model string representation<commit_after># -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
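The test change above follows from switching the model's __repr__ from the mutable name to the immutable id: a representation keyed on the id stays stable across renames, which is exactly what the updated assertion relies on. A minimal sketch of the pattern with invented values:

class Project:
    def __init__(self, project_id, name):
        self.id = project_id
        self.name = name

    def __repr__(self):
        # Keyed on the immutable id: renaming the project does not
        # change its representation.
        return '<Project %s>' % self.id

p = Project('a3f9', 'Cosmos Landromat')
print(repr(p))   # <Project a3f9>
p.name = 'Big Buck Bunny'
print(repr(p))   # unchanged: <Project a3f9>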
c908edadadb866292a612103d2854bef4673efab
shinken/__init__.py
shinken/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
Remove superfluous import of shinken.objects in shinken/_init__.py.
Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.
Python
agpl-3.0
naparuba/shinken,claneys/shinken,naparuba/shinken,tal-nino/shinken,mohierf/shinken,titilambert/alignak,lets-software/shinken,KerkhoffTechnologies/shinken,Simage/shinken,mohierf/shinken,gst/alignak,lets-software/shinken,Aimage/shinken,Aimage/shinken,geektophe/shinken,Aimage/shinken,staute/shinken_package,savoirfairelinux/shinken,staute/shinken_deb,rednach/krill,peeyush-tm/shinken,geektophe/shinken,kaji-project/shinken,tal-nino/shinken,claneys/shinken,ddurieux/alignak,ddurieux/alignak,h4wkmoon/shinken,rednach/krill,rledisez/shinken,xorpaul/shinken,savoirfairelinux/shinken,mohierf/shinken,peeyush-tm/shinken,Simage/shinken,savoirfairelinux/shinken,savoirfairelinux/shinken,peeyush-tm/shinken,titilambert/alignak,kaji-project/shinken,tal-nino/shinken,ddurieux/alignak,xorpaul/shinken,fpeyre/shinken,tal-nino/shinken,lets-software/shinken,kaji-project/shinken,rledisez/shinken,h4wkmoon/shinken,naparuba/shinken,peeyush-tm/shinken,h4wkmoon/shinken,dfranco/shinken,kaji-project/shinken,xorpaul/shinken,peeyush-tm/shinken,lets-software/shinken,ddurieux/alignak,KerkhoffTechnologies/shinken,rledisez/shinken,h4wkmoon/shinken,tal-nino/shinken,fpeyre/shinken,Simage/shinken,h4wkmoon/shinken,staute/shinken_deb,gst/alignak,staute/shinken_deb,lets-software/shinken,Aimage/shinken,Simage/shinken,dfranco/shinken,geektophe/shinken,kaji-project/shinken,xorpaul/shinken,staute/shinken_package,rledisez/shinken,mohierf/shinken,claneys/shinken,gst/alignak,h4wkmoon/shinken,staute/shinken_package,rledisez/shinken,geektophe/shinken,geektophe/shinken,dfranco/shinken,fpeyre/shinken,claneys/shinken,mohierf/shinken,dfranco/shinken,ddurieux/alignak,ddurieux/alignak,KerkhoffTechnologies/shinken,naparuba/shinken,titilambert/alignak,titilambert/alignak,KerkhoffTechnologies/shinken,staute/shinken_deb,rledisez/shinken,savoirfairelinux/shinken,h4wkmoon/shinken,staute/shinken_deb,xorpaul/shinken,geektophe/shinken,xorpaul/shinken,claneys/shinken,Simage/shinken,claneys/shinken,staute/shinken_deb,peeyush-tm/shinken,kaji-project/shinken,KerkhoffTechnologies/shinken,Alignak-monitoring/alignak,rednach/krill,Simage/shinken,Aimage/shinken,rednach/krill,fpeyre/shinken,savoirfairelinux/shinken,staute/shinken_package,lets-software/shinken,staute/shinken_package,rednach/krill,rednach/krill,h4wkmoon/shinken,xorpaul/shinken,Aimage/shinken,Alignak-monitoring/alignak,dfranco/shinken,naparuba/shinken,fpeyre/shinken,gst/alignak,naparuba/shinken,dfranco/shinken,xorpaul/shinken,mohierf/shinken,kaji-project/shinken,fpeyre/shinken,KerkhoffTechnologies/shinken,tal-nino/shinken,staute/shinken_package
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects <commit_msg>Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects <commit_msg>Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
3153d1d25e8b6c25729880abca4da9a79f8036ff
editorsnotes/main/admin_views.py
editorsnotes/main/admin_views.py
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) user = request.user user_affiliation = user.get_profile().affiliation editor = Group.objects.get(name='Editors') admin = Group.objects.get(name='Admins') is_project_editor = (editor in user.groups.all() and user_affiliation == project) is_admin = admin in user.groups.all() if not (is_project_editor or is_admin): messages.add_message( request, messages.ERROR, 'You do not have permission to edit the roster of %s' % ( project.name) ) return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
Check if user can edit project roster
Check if user can edit project roster
Python
agpl-3.0
editorsnotes/editorsnotes,editorsnotes/editorsnotes
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request)) Check if user can edit project roster
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) user = request.user user_affiliation = user.get_profile().affiliation editor = Group.objects.get(name='Editors') admin = Group.objects.get(name='Admins') is_project_editor = (editor in user.groups.all() and user_affiliation == project) is_admin = admin in user.groups.all() if not (is_project_editor or is_admin): messages.add_message( request, messages.ERROR, 'You do not have permission to edit the roster of %s' % ( project.name) ) return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
<commit_before>from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request)) <commit_msg>Check if user can edit project roster<commit_after>
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) user = request.user user_affiliation = user.get_profile().affiliation editor = Group.objects.get(name='Editors') admin = Group.objects.get(name='Admins') is_project_editor = (editor in user.groups.all() and user_affiliation == project) is_admin = admin in user.groups.all() if not (is_project_editor or is_admin): messages.add_message( request, messages.ERROR, 'You do not have permission to edit the roster of %s' % ( project.name) ) return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request)) Check if user can edit project rosterfrom django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) user = request.user user_affiliation = user.get_profile().affiliation editor = Group.objects.get(name='Editors') admin = Group.objects.get(name='Admins') is_project_editor = (editor in user.groups.all() and user_affiliation == project) is_admin = admin in user.groups.all() if not (is_project_editor or is_admin): messages.add_message( request, messages.ERROR, 'You do not have permission to edit the roster of %s' % ( project.name) ) return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
<commit_before>from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request)) <commit_msg>Check if user can edit project roster<commit_after>from django.shortcuts import render_to_response, get_object_or_404 from django.http import HttpResponseRedirect from django.template import RequestContext from django.contrib.auth.models import User, Group from django.contrib import messages from models import Project from forms import ProjectUserFormSet def project_roster(request, project_id): o = {} project = get_object_or_404(Project, id=project_id) user = request.user user_affiliation = user.get_profile().affiliation editor = Group.objects.get(name='Editors') admin = Group.objects.get(name='Admins') is_project_editor = (editor in user.groups.all() and user_affiliation == project) is_admin = admin in user.groups.all() if not (is_project_editor or is_admin): messages.add_message( request, messages.ERROR, 'You do not have permission to edit the roster of %s' % ( project.name) ) return HttpResponseRedirect(request.META.get('HTTP_REFERER', '/')) if request.method == 'POST': formset = ProjectUserFormSet(request.POST) if formset.is_valid(): formset.save() messages.add_message( request, messages.SUCCESS, 'Roster for %s saved.' % (project.name)) return HttpResponseRedirect(request.path) else: #TODO pass else: project_roster = User.objects.filter( userprofile__affiliation=project).order_by('-is_active', '-last_login') o['formset'] = ProjectUserFormSet(queryset=project_roster) return render_to_response( 'admin/project_roster.html', o, context_instance=RequestContext(request))
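The guard added above reduces to three membership tests: a user passes if they are an editor affiliated with the project, or an admin anywhere. Stripped of the Django ORM (FakeUser and the sample values below are invented), the decision logic is:

class FakeUser:
    """Stand-in for django.contrib.auth.models.User, just enough to
    exercise the roster-permission logic from the record."""
    def __init__(self, groups, affiliation):
        self.groups = set(groups)
        self.affiliation = affiliation

def can_edit_roster(user, project):
    is_project_editor = 'Editors' in user.groups and user.affiliation == project
    is_admin = 'Admins' in user.groups
    return is_project_editor or is_admin

print(can_edit_roster(FakeUser({'Editors'}, 'emma'), 'emma'))   # True
print(can_edit_roster(FakeUser({'Editors'}, 'emma'), 'other'))  # False: wrong project
print(can_edit_roster(FakeUser({'Admins'}, None), 'other'))     # True: admins bypass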
85f759a9446cf988cc859d3b74d11e6b224bbd16
request/managers.py
request/managers.py
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
Use a list comprehension and set() to make the active_users query simpler and faster.
Use a list comprehension and set() to make the active_users query simpler and faster.
Python
bsd-2-clause
kylef/django-request,gnublade/django-request,Derecho/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request,kylef/django-request
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users Use a list comprehension and set() to make the active_users query simpler and faster.
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
<commit_before>from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users <commit_msg>Use a list comprehension and set() to make the active_users query simpler and faster.<commit_after>
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users Use a list comprehension and set() to make the active_users query simpler and faster.from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
<commit_before>from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users <commit_msg>Use a list comprehension and set() to make the active_users query simpler and faster.<commit_after>from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
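Two things happen in the rewrite above: the manual done/users bookkeeping (a linear list-membership scan per request) collapses into a single set construction, and the return type quietly changes from list to set — the docstring example still shows a list, which is worth knowing when comparing outputs. The deduplication is equivalent because Django model instances hash and compare by primary key. A self-contained demonstration with strings standing in for users:

from collections import namedtuple

Request = namedtuple('Request', 'user')
requests = [Request('kylef'), Request('krisje8'), Request('kylef')]

# Before: parallel lists, with a linear scan of `done` for every request
users, done = [], []
for request in requests:
    if request.user not in done:
        done.append(request.user)
        users.append(request.user)

# After: one pass, constant-time membership tests via hashing
active = set(request.user for request in requests)

assert active == set(users)
print(active)   # {'kylef', 'krisje8'} in some order -- sets are unordered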
cad9194d64786acadb49174f8797295f1bf0bcca
website/celery_worker.py
website/celery_worker.py
from app import celery from app import create_app app = create_app(config_override={'BDB_READONLY': True}) celery
from app import celery from app import create_app app = create_app(config_override={'HDB_READONLY': True}) celery
Update config variable name of HDB in celery worker
Update config variable name of HDB in celery worker
Python
lgpl-2.1
reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations
from app import celery from app import create_app app = create_app(config_override={'BDB_READONLY': True}) celery Update config variable name of HDB in celery worker
from app import celery from app import create_app app = create_app(config_override={'HDB_READONLY': True}) celery
<commit_before>from app import celery from app import create_app app = create_app(config_override={'BDB_READONLY': True}) celery <commit_msg>Update config variable name of HDB in celery worker<commit_after>
from app import celery from app import create_app app = create_app(config_override={'HDB_READONLY': True}) celery
from app import celery from app import create_app app = create_app(config_override={'BDB_READONLY': True}) celery Update config variable name of HDB in celery workerfrom app import celery from app import create_app app = create_app(config_override={'HDB_READONLY': True}) celery
<commit_before>from app import celery from app import create_app app = create_app(config_override={'BDB_READONLY': True}) celery <commit_msg>Update config variable name of HDB in celery worker<commit_after>from app import celery from app import create_app app = create_app(config_override={'HDB_READONLY': True}) celery
11583cfca501164c5c08af70f66d430cd180dbc5
examples/basic_nest/make_nest.py
examples/basic_nest/make_nest.py
#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs')
#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
Update basic_nest for new API
Update basic_nest for new API
Python
mit
fhcrc/nestly
#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs') Update basic_nest for new API
#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
<commit_before>#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs') <commit_msg>Update basic_nest for new API<commit_after>
#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs') Update basic_nest for new API#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
<commit_before>#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs') <commit_msg>Update basic_nest for new API<commit_after>#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
978b41a29eda295974ed5cf1a7cd5b79b148f479
coverage/execfile.py
coverage/execfile.py
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
Move the open outside the try, since the finally is only needed once the file is successfully opened.
Move the open outside the try, since the finally is only needed once the file is successfully opened.
Python
apache-2.0
7WebPages/coveragepy,blueyed/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,larsbutler/coveragepy,hugovk/coveragepy
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close() Move the open outside the try, since the finally is only needed once the file is successfully opened.
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
<commit_before>"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close() <commit_msg>Move the open outside the try, since the finally is only needed once the file is successfully opened.<commit_after>
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close() Move the open outside the try, since the finally is only needed once the file is successfully opened."""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
<commit_before>"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close() <commit_msg>Move the open outside the try, since the finally is only needed once the file is successfully opened.<commit_after>"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
84d3738d2eb8a24dcb66cb329994f88bd55128c0
tests/test_utils.py
tests/test_utils.py
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' )
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
Add utils tests. Rework pull.
Add utils tests. Rework pull.
Python
mit
Brown-University-Library/vivo-data-management,Brown-University-Library/vivo-data-management
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) Add utils tests. Rework pull.
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
<commit_before> import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) <commit_msg>Add utils tests. Rework pull.<commit_after>
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) Add utils tests. Rework pull. import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
<commit_before> import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) <commit_msg>Add utils tests. Rework pull.<commit_after> import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
c34840a7ac20d22e650be09a515cee9dbfcf6043
tests/test_views.py
tests/test_views.py
from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
from django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
Remove accidentally committed test code
Remove accidentally committed test code
Python
mit
thomasw/djproxy
from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/" Remove accidentally committed test code
from django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
<commit_before>from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/" <commit_msg>Remove accidentally committed test code<commit_after>
from django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/" Remove accidentally committed test codefrom django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
<commit_before>from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/" <commit_msg>Remove accidentally committed test code<commit_after>from django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
2351100234180afc6f6510140e9f989051fb6511
PyFVCOM/__init__.py
PyFVCOM/__init__.py
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read_results from PyFVCOM import utilities
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read from PyFVCOM import utilities
Fix module name to import.
Fix module name to import.
Python
mit
pwcazenave/PyFVCOM
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read_results from PyFVCOM import utilities Fix module name to import.
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read from PyFVCOM import utilities
<commit_before>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read_results from PyFVCOM import utilities <commit_msg>Fix module name to import.<commit_after>
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read from PyFVCOM import utilities
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read_results from PyFVCOM import utilities Fix module name to import.""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read from PyFVCOM import utilities
<commit_before>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read_results from PyFVCOM import utilities <commit_msg>Fix module name to import.<commit_after>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '2.0.0' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy from PyFVCOM import coast from PyFVCOM import ctd from PyFVCOM import current from PyFVCOM import grid from PyFVCOM import coordinate from PyFVCOM import ocean from PyFVCOM import stats from PyFVCOM import tidal_ellipse from PyFVCOM import tide from PyFVCOM import plot from PyFVCOM import read from PyFVCOM import utilities
ab418734f432691ec4a927be32364ee85baab35c
__init__.py
__init__.py
import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2))
import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
Use python version dependent import
Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98
Python
mit
wikimedia/pywikibot-externals-httplib2,wikimedia/pywikibot-externals-httplib2,jayvdb/httplib2,jayvdb/httplib2
import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2)) Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98
import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
<commit_before>import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2)) <commit_msg>Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98<commit_after>
import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2)) Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
<commit_before>import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2)) <commit_msg>Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98<commit_after>import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
9ab7efa44a8e7267b2902b6e23ff61381d31692c
profile_collection/startup/85-robot.py
profile_collection/startup/85-robot.py
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
Add sample loading logic to Robot.
WIP: Add sample loading logic to Robot.
Python
bsd-2-clause
NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py WIP: Add sample loading logic to Robot.
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
<commit_before>from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py <commit_msg>WIP: Add sample loading logic to Robot.<commit_after>
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py WIP: Add sample loading logic to Robot.from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
<commit_before>from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py <commit_msg>WIP: Add sample loading logic to Robot.<commit_after>from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
2c6f5cfb2e90e815d74dca11c395e25875d475be
corehq/ex-submodules/phonelog/tasks.py
corehq/ex-submodules/phonelog/tasks.py
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
Drop table for device report logs.
Drop table for device report logs.
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete() Drop table for device report logs.
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
<commit_before>from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete() <commit_msg>Drop table for device report logs.<commit_after>
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete() Drop table for device report logs.from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
<commit_before>from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete() <commit_msg>Drop table for device report logs.<commit_after>from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
02c18a46935d58ac08a340e5011fb345ffb7f83a
h2o-py/tests/testdir_algos/gbm/pyunit_cup98_01GBM_medium.py
h2o-py/tests/testdir_algos/gbm/pyunit_cup98_01GBM_medium.py
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
Update test for new column naming rules. Remove '' as a name and call column 'C1'
Update test for new column naming rules. Remove '' as a name and call column 'C1'
Python
apache-2.0
bospetersen/h2o-3,spennihana/h2o-3,mrgloom/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,PawarPawan/h2o-v3,jangorecki/h2o-3,junwucs/h2o-3,printedheart/h2o-3,kyoren/https-github.com-h2oai-h2o-3,michalkurka/h2o-3,michalkurka/h2o-3,datachand/h2o-3,ChristosChristofidis/h2o-3,bospetersen/h2o-3,brightchen/h2o-3,jangorecki/h2o-3,mrgloom/h2o-3,spennihana/h2o-3,weaver-viii/h2o-3,tarasane/h2o-3,mrgloom/h2o-3,junwucs/h2o-3,bospetersen/h2o-3,weaver-viii/h2o-3,YzPaul3/h2o-3,datachand/h2o-3,mathemage/h2o-3,junwucs/h2o-3,brightchen/h2o-3,h2oai/h2o-3,mathemage/h2o-3,pchmieli/h2o-3,PawarPawan/h2o-v3,madmax983/h2o-3,printedheart/h2o-3,h2oai/h2o-3,YzPaul3/h2o-3,h2oai/h2o-dev,mathemage/h2o-3,madmax983/h2o-3,brightchen/h2o-3,madmax983/h2o-3,junwucs/h2o-3,pchmieli/h2o-3,nilbody/h2o-3,pchmieli/h2o-3,pchmieli/h2o-3,mrgloom/h2o-3,PawarPawan/h2o-v3,kyoren/https-github.com-h2oai-h2o-3,weaver-viii/h2o-3,YzPaul3/h2o-3,mrgloom/h2o-3,junwucs/h2o-3,h2oai/h2o-3,bospetersen/h2o-3,brightchen/h2o-3,h2oai/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,ChristosChristofidis/h2o-3,YzPaul3/h2o-3,weaver-viii/h2o-3,weaver-viii/h2o-3,h2oai/h2o-dev,datachand/h2o-3,ChristosChristofidis/h2o-3,michalkurka/h2o-3,datachand/h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,nilbody/h2o-3,madmax983/h2o-3,spennihana/h2o-3,bospetersen/h2o-3,h2oai/h2o-dev,nilbody/h2o-3,YzPaul3/h2o-3,junwucs/h2o-3,ChristosChristofidis/h2o-3,tarasane/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,madmax983/h2o-3,ChristosChristofidis/h2o-3,pchmieli/h2o-3,mathemage/h2o-3,nilbody/h2o-3,ChristosChristofidis/h2o-3,nilbody/h2o-3,tarasane/h2o-3,weaver-viii/h2o-3,mrgloom/h2o-3,spennihana/h2o-3,mathemage/h2o-3,printedheart/h2o-3,h2oai/h2o-3,printedheart/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,tarasane/h2o-3,PawarPawan/h2o-v3,tarasane/h2o-3,h2oai/h2o-3,mathemage/h2o-3,spennihana/h2o-3,mathemage/h2o-3,junwucs/h2o-3,madmax983/h2o-3,pchmieli/h2o-3,brightchen/h2o-3,PawarPawan/h2o-v3,bospetersen/h2o-3,michalkurka/h2o-3,datachand/h2o-3,datachand/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,kyoren/https-github.com-h2oai-h2o-3,PawarPawan/h2o-v3,weaver-viii/h2o-3,kyoren/https-github.com-h2oai-h2o-3,datachand/h2o-3,nilbody/h2o-3,kyoren/https-github.com-h2oai-h2o-3,h2oai/h2o-dev,YzPaul3/h2o-3,brightchen/h2o-3,YzPaul3/h2o-3,ChristosChristofidis/h2o-3,printedheart/h2o-3,printedheart/h2o-3,tarasane/h2o-3,mrgloom/h2o-3,nilbody/h2o-3,h2oai/h2o-dev,jangorecki/h2o-3,michalkurka/h2o-3,brightchen/h2o-3,h2oai/h2o-dev,kyoren/https-github.com-h2oai-h2o-3,bospetersen/h2o-3,h2oai/h2o-3,printedheart/h2o-3,PawarPawan/h2o-v3,tarasane/h2o-3,pchmieli/h2o-3
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM) Update test for new column naming rules. Remove '' as a name and call column 'C1'
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
<commit_before>import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM) <commit_msg>Update test for new column naming rules. Remove '' as a name and call column 'C1'<commit_after>
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM) Update test for new column naming rules. Remove '' as a name and call column 'C1'import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
<commit_before>import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM) <commit_msg>Update test for new column naming rules. Remove '' as a name and call column 'C1'<commit_after>import sys sys.path.insert(1, "../../../") import h2o def cupMediumGBM(ip,port): # Connect to h2o h2o.init(ip,port) train = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98LRN_z.csv")) test = h2o.import_frame(path=h2o.locate("bigdata/laptop/usecases/cup98VAL_z.csv")) train["TARGET_B"] = train["TARGET_B"].asfactor() # Train H2O GBM Model: train_cols = train.names() for c in ['C1', "TARGET_D", "TARGET_B", "CONTROLN"]: train_cols.remove(c) model = h2o.gbm(x=train[train_cols], y=train["TARGET_B"], distribution = "bernoulli", ntrees = 5) if __name__ == "__main__": h2o.run_test(sys.argv, cupMediumGBM)
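The diff above drops columns from a name list with repeated list.remove() calls, which raise ValueError when a name is absent. A minimal standalone sketch of a safer exclusion pattern; the column names are hypothetical and nothing here depends on the h2o API:

# Collect feature names by exclusion rather than in-place removal;
# membership tests against a set never raise on missing names.
excluded = {"C1", "TARGET_D", "TARGET_B", "CONTROLN"}
all_columns = ["C1", "AGE", "INCOME", "TARGET_D", "TARGET_B", "CONTROLN"]
train_cols = [name for name in all_columns if name not in excluded]
print(train_cols)  # ['AGE', 'INCOME']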
5025bff2ca9a4f31a371ecbd9255b1fb92b9cc4d
kafka_influxdb/encoder/echo_encoder.py
kafka_influxdb/encoder/echo_encoder.py
class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg
try: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
Return a list of messages in echo encoder and add mypy type hints
Return a list of messages in echo encoder and add mypy type hints
Python
apache-2.0
mre/kafka-influxdb,mre/kafka-influxdb
class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg Return a list of messages in echo encoder and add mypy type hints
try: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
<commit_before>class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg <commit_msg>Return a list of messages in echo encoder and add mypy type hints<commit_after>
try: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg Return a list of messages in echo encoder and add mypy type hintstry: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
<commit_before>class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg <commit_msg>Return a list of messages in echo encoder and add mypy type hints<commit_after>try: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
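The commit above uses comment-style annotations so the module stays valid Python 2 syntax while mypy can still check it. A minimal sketch of that pattern under the assumption that the typing module is installed; it imports List, the name the annotation actually refers to:

try:
    from typing import List  # consumed only by the type checker
except ImportError:
    pass  # very old interpreters without typing still run the code

def encode(msg):
    # type: (bytes) -> List[bytes]
    """Wrap one message in a list without altering it."""
    return [msg]

print(encode(b"payload"))  # [b'payload']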
2722a59aad0775f1bcd1e81232ff445b9012a2ae
ssim/compat.py
ssim/compat.py
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
Add pylint comment to disable the redefined-variable-type warning.
Add pylint comment to disable the redefined-variable-type warning.
Python
mit
jterrace/pyssim
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name Add pylint to disable redefined variable.
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
<commit_before>"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name <commit_msg>Add pylint to disable redefined variable.<commit_after>
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name Add pylint to disable redefined variable."""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
<commit_before>"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name <commit_msg>Add pylint to disable redefined variable.<commit_after>"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
659659270ef067baf0edea5de5bb10fdab532eaa
run-tests.py
run-tests.py
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
Remove SALADIR from environment if present
tests: Remove SALADIR from environment if present
Python
mit
akheron/sala,akheron/sala
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0) tests: Remove SALADIR from environment if present
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
<commit_before>#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0) <commit_msg>tests: Remove SALADIR from environment if present<commit_after>
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0) tests: Remove SALADIR from environment if present#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
<commit_before>#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0) <commit_msg>tests: Remove SALADIR from environment if present<commit_after>#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
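Deleting from os.environ, as the fix above does, mutates the test runner's own process. A sketch of a gentler variant that hands a scrubbed copy to the child instead; the command string is only a placeholder:

import os
import subprocess

# Copy the environment and drop state that could skew the tests;
# dict.pop() with a default never raises on a missing key.
env = os.environ.copy()
env.pop("SALADIR", None)
subprocess.call("cram test", shell=True, env=env)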
431720194c20dde7b19236d2302c0f9910fd7ea4
pseudorandom.py
pseudorandom.py
import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
import os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
Send just plaintext name if curl is used
Send just plaintext name if curl is used
Python
mit
treyhunner/pseudorandom.name,treyhunner/pseudorandom.name
import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port) Send just plaintext name if curl is used
import os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
<commit_before>import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port) <commit_msg>Send just plaintext name if curl is used<commit_after>
import os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port) Send just plaintext name if curl is usedimport os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
<commit_before>import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port) <commit_msg>Send just plaintext name if curl is used<commit_after>import os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
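A minimal self-contained sketch of the User-Agent branch added above; curl identifies itself with a header such as "curl/8.5.0", and startswith() avoids the fixed-width slice:

from flask import Flask, request

app = Flask(__name__)

@app.route("/")
def index():
    agent = request.headers.get("User-Agent", "")
    if agent.lower().startswith("curl"):
        # Command-line clients get bare text with a trailing newline.
        return "some-name\n"
    # Browsers get markup; a rendered template would normally go here.
    return "<html><body>some-name</body></html>"

if __name__ == "__main__":
    app.run()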
411ae98889d3611151a6f94d661b86b1bbc5e026
apis/Google.Cloud.Speech.V1/synth.py
apis/Google.Cloud.Speech.V1/synth.py
import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False)
import sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
Use the right bash command based on platform
Use the right bash command based on platform
Python
apache-2.0
googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/gcloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet
import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False) Use the right bash command based on platform
import sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
<commit_before>import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False) <commit_msg>Use the right bash command based on platform<commit_after>
import sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False) Use the right bash command based on platformimport sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
<commit_before>import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False) <commit_msg>Use the right bash command based on platform<commit_after>import sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
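The change above hard-codes both interpreter locations. A sketch that first consults PATH and only then falls back to the conventional per-platform paths; it assumes Python 3.3+ for shutil.which:

import shutil
import sys

# Prefer whatever "bash" resolves to on PATH; fall back to the usual
# installation paths when the lookup comes back empty.
bash = shutil.which("bash")
if bash is None:
    if sys.platform == "win32":
        bash = r"C:\Program Files\Git\bin\bash.exe"  # Git for Windows default
    else:
        bash = "/bin/bash"
print(bash)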
a1f26386bec0c4d39bce77d0fd3975ae4b0930d0
apps/package/tests/test_handlers.py
apps/package/tests/test_handlers.py
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos())
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
Test what get_repo() does for unsupported repos
Test what get_repo() does for unsupported repos
Python
mit
nanuxbe/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,miketheman/opencomparison,benracine/opencomparison,QLGu/djangopackages,cartwheelweb/packaginator,miketheman/opencomparison,audreyr/opencomparison,benracine/opencomparison,audreyr/opencomparison,QLGu/djangopackages,cartwheelweb/packaginator,nanuxbe/djangopackages,pydanny/djangopackages,pydanny/djangopackages,cartwheelweb/packaginator
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) Test what get_repo() does for unsupported repos
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
<commit_before>from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) <commit_msg>Test what get_repo() does for unsupported repos<commit_after>
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) Test what get_repo() does for unsupported reposfrom django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
<commit_before>from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) <commit_msg>Test what get_repo() does for unsupported repos<commit_after>from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
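The lambda wrapper added above works, but assertRaises also acts as a context manager, which reads more directly and exposes the exception object. A self-contained sketch with a hypothetical stand-in for the lookup function:

import unittest

def get_repo(name):
    """Hypothetical stand-in for the handler lookup tested above."""
    if name != "github":
        raise ImportError(name)
    return name

class RepoTests(unittest.TestCase):
    def test_unknown_repo_raises(self):
        with self.assertRaises(ImportError) as caught:
            get_repo("xyzzy")
        self.assertEqual(str(caught.exception), "xyzzy")

if __name__ == "__main__":
    unittest.main()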
ab91d525abb5bb1ef476f3aac2c034e50f85617a
src/apps/contacts/mixins.py
src/apps/contacts/mixins.py
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
Fix description for contact mixin
Fix description for contact mixin
Python
mit
wis-software/office-manager
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results Fix description for contact mixin
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
<commit_before>from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results <commit_msg>Fix description for contact mixin<commit_after>
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results Fix description for contact mixinfrom apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
<commit_before>from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results <commit_msg>Fix description for contact mixin<commit_after>from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
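A standalone sketch of the __subclasses__() dispatch the mixin relies on, without the Django ORM; note that only subclasses whose modules have already been imported are visible to it:

class BaseContact(object):
    pass

class Email(BaseContact):
    key = "email"

class Skype(BaseContact):
    key = "skype"

def contact_registry():
    # Maps each contact kind to its class; import order determines
    # which subclasses __subclasses__() can actually see.
    return {cls.key: cls for cls in BaseContact.__subclasses__()}

print(sorted(contact_registry()))  # ['email', 'skype']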
51d09e2552c31e74b85c2e6bd12bcbab7e1b2047
pygs/test/stress_utils.py
pygs/test/stress_utils.py
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps -p %s u"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps u -p %s"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
Make ps shell-out Mac- and Debian-compliant.
Make ps shell-out Mac- and Debian-compliant.
Python
bsd-3-clause
jeriksson/graphserver,jeriksson/graphserver,graphserver/graphserver,bmander/graphserver,brendannee/Bikesy-Backend,brendannee/Bikesy-Backend,brendannee/Bikesy-Backend,jeriksson/graphserver,jeriksson/graphserver,bmander/graphserver,jeriksson/graphserver,graphserver/graphserver,brendannee/Bikesy-Backend,brendannee/Bikesy-Backend
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps -p %s u"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )Make ps shell-out Mac and debian compliant.
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps u -p %s"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
<commit_before>import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps -p %s u"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )<commit_msg>Make ps shell-out Mac and debian compliant.<commit_after>
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps u -p %s"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps -p %s u"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )Make ps shell-out Mac and debian compliant.import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps u -p %s"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
<commit_before>import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps -p %s u"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )<commit_msg>Make ps shell-out Mac and debian compliant.<commit_after>import os import sys def get_mem_usage(): """returns percentage and vsz mem usage of this script""" pid = os.getpid() psout = os.popen( "ps u -p %s"%pid ).read() parsed_psout = psout.split("\n")[1].split() return float(parsed_psout[3]), int( parsed_psout[4] )
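Parsing ps output is fragile precisely because flag order and column layout differ across platforms, which is what the fix above works around. A sketch of a Unix-only alternative using the standard resource module; note that ru_maxrss is reported in bytes on macOS but in kilobytes on Linux:

import resource
import sys

def peak_memory_kb():
    """Peak resident set size of this process, in kilobytes."""
    peak = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    return peak // 1024 if sys.platform == "darwin" else peak

print(peak_memory_kb())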
b05eacfa7f2a3fb653ec4a9653780d211245bfb1
pyvac/helpers/calendar.py
pyvac/helpers/calendar.py
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
Now use the 'oop' method for creating the principal object, preventing the 'path handling error' with the Baikal CalDAV server
Now use the 'oop' method for creating the principal object, preventing the 'path handling error' with the Baikal CalDAV server
Python
bsd-3-clause
doyousoft/pyvac,sayoun/pyvac,doyousoft/pyvac,sayoun/pyvac,doyousoft/pyvac,sayoun/pyvac
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav server
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
<commit_before>import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True <commit_msg>Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav server<commit_after>
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav serverimport logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
<commit_before>import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True <commit_msg>Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav server<commit_after>import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
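A minimal sketch of the principal discovery used above, following the caldav package's client API; the server URL and credentials are made up:

import caldav

# client.principal() performs the discovery itself, so no separate
# principal URL has to be constructed by hand.
client = caldav.DAVClient("https://user:secret@cal.example.com/dav/")
principal = client.principal()
for calendar in principal.calendars():
    print(calendar)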
548e54c0a3e5fe7115b6f92e449c53f5a08ba5de
tests/setup.py
tests/setup.py
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('tests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('numsconstests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
Change the name for end-to-end numscons tests
Change the name for end-to-end numscons tests
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('tests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration) Change the name for end-to-end numscons tests
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('numsconstests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
<commit_before>import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('tests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration) <commit_msg>Change the name for end-to-end numscons tests<commit_after>
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('numsconstests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('tests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration) Change the name for end-to-end numscons testsimport os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('numsconstests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
<commit_before>import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('tests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration) <commit_msg>Change the name for end-to-end numscons tests<commit_after>import os import os.path def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration config = Configuration('numsconstests',parent_package,top_path) config.add_subpackage('examples') return config if __name__ == '__main__': from numpy.distutils.core import setup setup(configuration=configuration)
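The one-word rename in this record matters because numpy.distutils installs a Configuration under exactly the name passed as its first argument, so a generic top-level name like 'tests' is first-come-first-served across every distribution on sys.path. A small stdlib probe of that collision risk, checking only the two names from this record:

import importlib.util

# A generic top-level package name may already be taken by some other
# installed distribution; a project-specific one effectively cannot be.
for name in ("tests", "numsconstests"):
    spec = importlib.util.find_spec(name)
    print(name, "is", "already taken on sys.path" if spec else "free")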
d51d9cc67eca9566673e963e824dc335eb47a9af
recipy/utils.py
recipy/utils.py
import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f)
import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
Use six instead of sys.version_info
Use six instead of sys.version_info
Python
apache-2.0
recipy/recipy,recipy/recipy
import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f) Use six instead of sys.version_info
import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
<commit_before>import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f) <commit_msg>Use six instead of sys.version_info<commit_after>
import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f) Use six instead of sys.version_infoimport six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
<commit_before>import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f) <commit_msg>Use six instead of sys.version_info<commit_after>import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
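The change in this record swaps the sys.version_info check for six's PY3 flag and moves the actual open() call into each branch. The same pattern in isolation: a minimal sketch, where open_text is a hypothetical helper name rather than anything from recipy.

import six

def open_text(path, encoding='utf-8'):
    # six.PY3 is a plain boolean computed once at import time, which reads
    # better than repeating sys.version_info[0] checks at every call site.
    if six.PY3:
        # Python 3's built-in open() decodes natively.
        return open(path, mode='r', encoding=encoding)
    # On Python 2, codecs.open() gives the equivalent behaviour.
    import codecs
    return codecs.open(path, mode='r', encoding=encoding)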
9e1cf6ecf8104b38c85a00e973873cbfa7d78236
bytecode.py
bytecode.py
class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
Add autoincrement for jump in the future
Add autoincrement for jump in the future
Python
bsd-3-clause
darbaga/simple_compiler
class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False Add autoincrement for jump in the future
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
<commit_before>class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False <commit_msg>Add autoincrement for jump in the future<commit_after>
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False Add autoincrement for jump in the futureclass BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
<commit_before>class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False <commit_msg>Add autoincrement for jump in the future<commit_after>class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
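The autoincrement flag added in this record only becomes meaningful inside the interpreter's dispatch loop, which the record does not include. One plausible driver, assuming a machine object with a pc instruction pointer (pc and run are hypothetical names, not from this repo):

def run(machine, program):
    machine.pc = 0
    machine.executing = True
    while machine.executing and machine.pc < len(program):
        instruction = program[machine.pc]
        instruction.execute(machine)
        # Ordinary bytecodes fall through to the next slot; a future Jump
        # subclass would set autoincrement = False and assign machine.pc
        # itself inside execute().
        if instruction.autoincrement:
            machine.pc += 1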
864653259bdcddf62f6d3c8f270099e99fbb8457
numba/cuda/tests/cudapy/test_userexc.py
numba/cuda/tests/cudapy/test_userexc.py
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file ([\.\/\\a-zA-Z_0-9]+), line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
Fix up regex for path matching
Fix up regex for path matching
Python
bsd-2-clause
stuartarchibald/numba,numba/numba,jriehl/numba,jriehl/numba,IntelLabs/numba,seibert/numba,seibert/numba,gmarkall/numba,stonebig/numba,cpcloud/numba,jriehl/numba,IntelLabs/numba,numba/numba,stonebig/numba,sklam/numba,jriehl/numba,seibert/numba,cpcloud/numba,sklam/numba,stuartarchibald/numba,numba/numba,sklam/numba,jriehl/numba,cpcloud/numba,gmarkall/numba,sklam/numba,sklam/numba,gmarkall/numba,numba/numba,stonebig/numba,stonebig/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stuartarchibald/numba,stuartarchibald/numba,IntelLabs/numba,seibert/numba,gmarkall/numba,IntelLabs/numba,IntelLabs/numba,cpcloud/numba,stonebig/numba
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file ([\.\/\\a-zA-Z_0-9]+), line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main() Fix up regex for path matching
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
<commit_before>from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file ([\.\/\\a-zA-Z_0-9]+), line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main() <commit_msg>Fix up regex for path matching<commit_after>
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file ([\.\/\\a-zA-Z_0-9]+), line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main() Fix up regex for path matchingfrom __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
<commit_before>from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file ([\.\/\\a-zA-Z_0-9]+), line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main() <commit_msg>Fix up regex for path matching<commit_after>from __future__ import print_function, absolute_import, division from numba.cuda.testing import unittest, SerialMixin, skip_on_cudasim from numba import cuda, config class MyError(Exception): pass regex_pattern = ( r'In function [\'"]test_exc[\'"], file [\.\/\\\-a-zA-Z_0-9]+, line \d+' ) class TestUserExc(SerialMixin, unittest.TestCase): def test_user_exception(self): @cuda.jit("void(int32)", debug=True) def test_exc(x): if x == 1: raise MyError elif x == 2: raise MyError("foo") test_exc(0) # no raise with self.assertRaises(MyError) as cm: test_exc(1) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]", str(cm.exception)) with self.assertRaises(MyError) as cm: test_exc(2) if not config.ENABLE_CUDASIM: self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertRegexpMatches(str(cm.exception), regex_pattern) self.assertIn("tid=[0, 0, 0] ctaid=[0, 0, 0]: foo", str(cm.exception)) if __name__ == '__main__': unittest.main()
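The substance of this regex fix is the escaped hyphen added to the character class (plus dropping the now-unused capturing parentheses), so file paths with '-' in a directory name match again. A quick self-check exercising only the tail of the pattern, against an invented path:

import re

new = r'file [\.\/\\\-a-zA-Z_0-9]+, line \d+'
old = r'file [\.\/\\a-zA-Z_0-9]+, line \d+'    # before the fix: no \- in the class
line = 'In function "test_exc", file /tmp/conda-bld/test_userexc.py, line 23'
assert re.search(new, line)       # hyphenated path segment now matches
assert not re.search(old, line)   # previously the '-' killed the match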
2034c8280800291227232435786441bfb0edace0
tests/cli.py
tests/cli.py
import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n")
import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
Add common CLI invocation test stubs.
Add common CLI invocation test stubs. Doesn't go into positional args.
Python
bsd-2-clause
frol/invoke,alex/invoke,mkusz/invoke,mattrobenolt/invoke,mattrobenolt/invoke,kejbaly2/invoke,kejbaly2/invoke,pyinvoke/invoke,pfmoore/invoke,sophacles/invoke,pfmoore/invoke,mkusz/invoke,tyewang/invoke,pyinvoke/invoke,singingwolfboy/invoke,frol/invoke
import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") Add common CLI invocation test stubs. Doesn't go into positional args.
import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
<commit_before>import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") <commit_msg>Add common CLI invocation test stubs. Doesn't go into positional args.<commit_after>
import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") Add common CLI invocation test stubs. Doesn't go into positional args.import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
<commit_before>import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") <commit_msg>Add common CLI invocation test stubs. Doesn't go into positional args.<commit_after>import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
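Each stub in this record pins down one surface syntax the eventual CLI parser has to normalize before task dispatch. A toy single-token normalizer (not invoke's real parser) covering the inline-value shapes the stubs enumerate:

def split_flag(token):
    """Return (flag, inline_value) for one command-line token."""
    if token.startswith('-') and '=' in token:
        flag, _, value = token.partition('=')   # --flag=value, -f=value
        return flag, value
    if token.startswith('-') and not token.startswith('--') and len(token) > 2:
        return token[:2], token[2:]             # -fvalue
    return token, None                          # --flag, -f, or a task name

assert split_flag('--flag=value') == ('--flag', 'value')
assert split_flag('-fvalue') == ('-f', 'value')
assert split_flag('--boolean') == ('--boolean', None)

Cases like "task1 -f mytask mytask" need parser state (knowing which flags of which task take values), which is exactly why they are stubbed as whole-invocation tests rather than unit-level ones.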
2e0286632b9120fe6a788db4483911513a39fe04
fabfile.py
fabfile.py
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
Reset to upstream master instead of rebasing during deployment
Reset to upstream master instead of rebasing during deployment
Python
bsd-3-clause
FreeMusicNinja/api.freemusic.ninja
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart") Reset to upstream master instead of rebasing during deployment
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
<commit_before>from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart") <commit_msg>Reset to upstream master instead of rebasing during deployment<commit_after>
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart") Reset to upstream master instead of rebasing during deploymentfrom fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
<commit_before>from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart") <commit_msg>Reset to upstream master instead of rebasing during deployment<commit_after>from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
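The switch in this record from git pull --rebase to git reset --hard origin/master makes the working tree an exact mirror of the remote branch and throws away stray local edits, but reset only moves to the already-fetched ref; pull used to do the fetching. A variant with the fetch made explicit (the fetch line is an assumption added here, not part of the commit):

from fabric.api import cd, env, run

def deploy():
    with cd(env.directory):
        run("git fetch origin")                 # refresh the remote-tracking ref
        run("git reset --hard origin/master")   # then mirror it exactly
        # ...remaining steps as in the task above...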
caf94786ca8c0bc9e3995da0a160c84921a3bfc6
fabfile.py
fabfile.py
from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
from fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
Update doc upload task w/ static hostname
Update doc upload task w/ static hostname
Python
lgpl-2.1
torkil/paramiko,redixin/paramiko,SebastianDeiss/paramiko,fvicente/paramiko,zpzgone/paramiko,mirrorcoder/paramiko,reaperhulk/paramiko,jaraco/paramiko,rcorrieri/paramiko,paramiko/paramiko,CptLemming/paramiko,anadigi/paramiko,digitalquacks/paramiko,ameily/paramiko,selboo/paramiko,remram44/paramiko,varunarya10/paramiko,davidbistolas/paramiko,Automatic/paramiko,dlitz/paramiko,thusoy/paramiko,mhdaimi/paramiko,zarr12steven/paramiko,thisch/paramiko,esc/paramiko,toby82/paramiko,jorik041/paramiko,dorianpula/paramiko,mitsuhiko/paramiko,alex/paramiko
from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs() Update doc upload task w/ static hostname
from fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
<commit_before>from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs() <commit_msg>Update doc upload task w/ static hostname<commit_after>
from fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs() Update doc upload task w/ static hostnamefrom fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
<commit_before>from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs() <commit_msg>Update doc upload task w/ static hostname<commit_after>from fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
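The fix above pins a Fabric 1.x task to a fixed host with the hosts decorator, so upload_docs no longer depends on env.hosts or a -H flag at invocation time. A minimal sketch of that pattern under Fabric 1.x (the hostname comes from the row; the task body is illustrative only):

from fabric.api import task, hosts, run

@task
@hosts("paramiko.org")
def uptime():
    # Runs against the pinned host regardless of env.hosts.
    run("uptime")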
06c8c91b05e0bf5f15271560df4101d90adfeb39
Lib/test/test_capi.py
Lib/test/test_capi.py
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _test exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _testcapi exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
Fix typo in comment (the module is now called _testcapi, not _test).
Fix typo in comment (the module is now called _testcapi, not _test).
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _test exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1] Fix typo in comment (the module is now called _testcapi, not _test).
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _testcapi exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
<commit_before># Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _test exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1] <commit_msg>Fix typo in comment (the module is now called _testcapi, not _test).<commit_after>
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _testcapi exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _test exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1] Fix typo in comment (the module is now called _testcapi, not _test).# Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _testcapi exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
<commit_before># Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _test exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1] <commit_msg>Fix typo in comment (the module is now called _testcapi, not _test).<commit_after># Run the _testcapi module tests (tests for the Python/C API): by defn, # these are all functions _testcapi exports whose name begins with 'test_'. import sys import test_support import _testcapi for name in dir(_testcapi): if name.startswith('test_'): test = getattr(_testcapi, name) if test_support.verbose: print "internal", name try: test() except _testcapi.error: raise test_support.TestFailed, sys.exc_info()[1]
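The row above only corrects a comment, but the driver it touches is Python 2 throughout (print statement, the "raise Class, value" form). For readers porting a similar discovery loop, a rough Python 3 counterpart of the failure-reporting idiom; using AssertionError as a stand-in for the old test_support.TestFailed is an assumption of this sketch:

import sys

def run_test(test):
    try:
        test()
    except Exception:
        # Python 3 spelling of: raise test_support.TestFailed, sys.exc_info()[1]
        raise AssertionError(sys.exc_info()[1]) from None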
b7cc9c5d2275a87849701e8a7307e26680bb740a
xvistaprof/reader.py
xvistaprof/reader.py
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skip_header=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
Update genfromtxt skip_header for numpy 1.10+
Update genfromtxt skip_header for numpy 1.10+
Python
bsd-2-clause
jonathansick/xvistaprof
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader) Update genfromtxt skip_header for numpy 1.10+
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skip_header=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
<commit_before>#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader) <commit_msg>Update genfromtxt skip_header for numpy 1.10+<commit_after>
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skip_header=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader) Update genfromtxt skip_header for numpy 1.10+#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skip_header=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
<commit_before>#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skiprows=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader) <commit_msg>Update genfromtxt skip_header for numpy 1.10+<commit_after>#!/usr/bin/env python # encoding: utf-8 """ Reader for XVISTA .prof tables. """ import numpy as np from astropy.table import Table from astropy.io import registry def xvista_table_reader(filename): dt = [('R', np.float), ('SB', np.float), ('SB_err', np.float), ('ELL', np.float), ('PA', np.float), ('EMAG', np.float), ('ELLMAG', np.float), ('ELLMAG_err', np.float), ('XC', np.float), ('YC', np.float), ('FRACONT', np.float), ('A1', np.float), ('A2', np.float), ('A4', np.float), ('CIRCMAG', np.float)] data = np.genfromtxt(filename, dtype=np.dtype(dt), skip_header=15, missing_values='*', filling_values=np.nan) return Table(data) registry.register_reader('xvistaprof', Table, xvista_table_reader)
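The only change in the row above tracks a NumPy rename: genfromtxt's skiprows argument became skip_header, and the old spelling was removed around NumPy 1.10. A defensive sketch for code that must run on both sides of that removal; the fallback clause only matters on very old NumPy builds (note also, as an aside, that np.float, still used in the row's dtype, is itself gone in NumPy 1.24+, where plain float works):

import numpy as np

def load_prof(filename, header_lines=15):
    try:
        return np.genfromtxt(filename, skip_header=header_lines,
                             missing_values='*', filling_values=np.nan)
    except TypeError:  # ancient NumPy without skip_header
        return np.genfromtxt(filename, skiprows=header_lines,
                             missing_values='*', filling_values=np.nan)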
e0b7b6ccdd947324ac72b48a28d6c68c7e980d96
ibmcnx/doc/DataSources.py
ibmcnx/doc/DataSources.py
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
Create documentation of DataSource Settings
8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
<commit_before>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
<commit_before>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
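The splitlines() fix above matters because wsadmin's AdminConfig.list() returns one newline-separated string, so iterating it directly yields single characters rather than configuration IDs. A pure-Python stand-in showing the difference (the listing value is made up for illustration):

listing = "cell/ds1(id1)\ncell/ds2(id2)"

for ch in listing:
    pass  # buggy pattern: ch is a single character each time

for ds_id in listing.splitlines():
    print(ds_id)  # fixed pattern: one configuration ID per iteration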
c6275896adb429fad7f8bebb74ce932739ecfb63
edx_shopify/views.py
edx_shopify/views.py
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
Use get_or_create correctly on Order
Use get_or_create correctly on Order
Python
agpl-3.0
hastexo/edx-shopify,fghaas/edx-shopify
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200) Use get_or_create correctly on Order
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
<commit_before>import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200) <commit_msg>Use get_or_create correctly on Order<commit_after>
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200) Use get_or_create correctly on Orderimport copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
<commit_before>import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200) <commit_msg>Use get_or_create correctly on Order<commit_after>import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
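The pattern behind the fix above: every keyword passed to Django's get_or_create outside defaults becomes part of the lookup, so a re-delivered webhook whose customer fields had changed would miss the existing row and then collide on the duplicate primary key at create time. Keeping the lookup on id alone and moving mutable fields into defaults avoids that. A self-contained emulation of the get_or_create contract, using plain dicts rather than Django:

rows = {}

def get_or_create(pk, defaults):
    if pk in rows:
        return rows[pk], False           # existing row: defaults are ignored
    rows[pk] = dict(defaults, id=pk)     # new row: defaults are applied once
    return rows[pk], True

order, created = get_or_create(42, {"email": "a@example.com"})
order, created = get_or_create(42, {"email": "changed@example.com"})
print(order["email"], created)           # -> a@example.com False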
cfdbe06da6e35f2cb166374cf249d51f18e1224e
pryvate/blueprints/packages/packages.py
pryvate/blueprints/packages/packages.py
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype})
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
Return a 404 if the package was not found
Return a 404 if the package was not found
Python
mit
Dinoshauer/pryvate,Dinoshauer/pryvate
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) Return a 404 if the package was not found
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
<commit_before>"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) <commit_msg>Return a 404 if the package was not found<commit_after>
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) Return a 404 if the package was not found"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
<commit_before>"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) <commit_msg>Return a 404 if the package was not found<commit_after>"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
e1b0222c8a3ed39bf76af10484a94aa4cfe5adc8
googlesearch/templatetags/search_tags.py
googlesearch/templatetags/search_tags.py
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
Remove last_page not needed anymore.
Remove last_page not needed anymore.
Python
mit
hzdg/django-google-search,hzdg/django-google-search
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context Remove last_page not needed anymore.
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
<commit_before>import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context <commit_msg>Remove last_page not needed anymore.<commit_after>
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context Remove last_page not needed anymore.import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
<commit_before>import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context <commit_msg>Remove last_page not needed anymore.<commit_after>import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
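Once the dead last_page computation is removed, the surviving pagination logic is just clamping. A standalone restatement with made-up numbers; the float() guard matters under Python 2 division, and, as a side observation only, the row's "next_page < max_pages" test appears to withhold the final page, which this sketch's clamp on current_page avoids:

import math

total_results, per_page, current_page = 95, 10, 4
max_pages = int(math.ceil(total_results / float(per_page)))
prev_page = current_page - 1 if current_page > 1 else None
next_page = current_page + 1 if current_page < max_pages else None
print(max_pages, prev_page, next_page)  # -> 10 3 5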
451a435ca051305517c79216d7ab9441939f4004
src/amr.py
src/amr.py
import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = df.grad(u)
    costheta = df.dot(m, E)
    sigma = 1/(1 + costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()

import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = -df.grad(u)
    costheta = df.dot(m, E)
    sigma = s0/(1 + alpha*costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
Add sigma0 and alpha AMR parameters to the function.
Add sigma0 and alpha AMR parameters to the function.
Python
bsd-2-clause
fangohr/fenics-anisotropic-magneto-resistance
import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = df.grad(u)
    costheta = df.dot(m, E)
    sigma = 1/(1 + costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
Add sigma0 and alpha AMR parameters to the function.

import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = -df.grad(u)
    costheta = df.dot(m, E)
    sigma = s0/(1 + alpha*costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()

<commit_before>import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = df.grad(u)
    costheta = df.dot(m, E)
    sigma = 1/(1 + costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
<commit_msg>Add sigma0 and alpha AMR parameters to the function.<commit_after>

import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = -df.grad(u)
    costheta = df.dot(m, E)
    sigma = s0/(1 + alpha*costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()

import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = df.grad(u)
    costheta = df.dot(m, E)
    sigma = 1/(1 + costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
Add sigma0 and alpha AMR parameters to the function.import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = -df.grad(u)
    costheta = df.dot(m, E)
    sigma = s0/(1 + alpha*costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()

<commit_before>import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = df.grad(u)
    costheta = df.dot(m, E)
    sigma = 1/(1 + costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
<commit_msg>Add sigma0 and alpha AMR parameters to the function.<commit_after>import dolfin as df


def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1):
    V = df.FunctionSpace(mesh, "CG", 1)

    # Define boundary condition
    bc = df.DirichletBC(V, g, DirichletBoundary())

    # Define variational problem
    u = df.Function(V)
    v = df.TestFunction(V)
    E = -df.grad(u)
    costheta = df.dot(m, E)
    sigma = s0/(1 + alpha*costheta**2)
    F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx

    # Compute solution
    df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}})

    # Plot solution and solution gradient
    df.plot(u, title="Solution")
    df.plot(sigma*df.grad(u), title="Solution gradient")
    df.interactive()
704e1457480d6baa8db7fff245d733303f8a17f5
gpytorch/kernels/white_noise_kernel.py
gpytorch/kernels/white_noise_kernel.py
import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x1.size(-1), device=x1.device)

import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2), device=x1.device)
Fix ZeroLazyVariable size returned by white noise kernel
Fix ZeroLazyVariable size returned by white noise kernel
Python
mit
jrg365/gpytorch,jrg365/gpytorch,jrg365/gpytorch
import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x1.size(-1), device=x1.device)
Fix ZeroLazyVariable size returned by white noise kernel

import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2), device=x1.device)

<commit_before>import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x1.size(-1), device=x1.device)
<commit_msg>Fix ZeroLazyVariable size returned by white noise kernel<commit_after>

import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2), device=x1.device)

import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x1.size(-1), device=x1.device)
Fix ZeroLazyVariable size returned by white noise kernelimport torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2), device=x1.device)

<commit_before>import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x1.size(-1), device=x1.device)
<commit_msg>Fix ZeroLazyVariable size returned by white noise kernel<commit_after>import torch

from . import Kernel
from gpytorch.lazy import DiagLazyVariable, ZeroLazyVariable


class WhiteNoiseKernel(Kernel):
    def __init__(self, variances):
        super(WhiteNoiseKernel, self).__init__()
        self.register_buffer("variances", variances)

    def forward(self, x1, x2):
        if self.training:
            return DiagLazyVariable(self.variances.unsqueeze(0))
        elif x1.size(-2) == x2.size(-2) and x1.size(-2) == self.variances.size(-1) and torch.equal(x1, x2):
            return DiagLazyVariable(self.variances.unsqueeze(0))
        else:
            return ZeroLazyVariable(x1.size(-3), x1.size(-2), x2.size(-2), device=x1.device)
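Editorial aside, not part of the record above: the one-token diff in this commit is a shape fix. A cross-covariance between x1 (n points) and x2 (m points) must be n-by-m, so the column count has to come from x2.size(-2), not from the feature dimension x1.size(-1). A minimal sketch with assumed tensor sizes (batch 1, 4 and 6 points of dimension 3 — all hypothetical):

import torch

x1 = torch.zeros(1, 4, 3)  # assumed: 4 points, 3 features
x2 = torch.zeros(1, 6, 3)  # assumed: 6 points, 3 features

# Rows from x1, columns from x2; the old code used x1.size(-1) == 3 here
shape = (x1.size(-3), x1.size(-2), x2.size(-2))
print(shape)  # -> (1, 4, 6)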
9a2169e38374429db7792537e2c4c1a78281200d
src/application/models.py
src/application/models.py
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
Add Docstrings and fix basic model
Add Docstrings and fix basic model
Python
mit
shashisp/reWrite-SITA,shashisp/reWrite-SITA,shashisp/reWrite-SITA
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty() Add Docstrings and fix basic model
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
<commit_before>""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty() <commit_msg>Add Docstrings and fix basic model<commit_after>
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty() Add Docstrings and fix basic model""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
<commit_before>""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty() <commit_msg>Add Docstrings and fix basic model<commit_after>""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
08ae805a943be3cdd5e92c050512374180b9ae35
indra/sources/geneways/geneways_api.py
indra/sources/geneways/geneways_api.py
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
Update API to look at one folder and return processor
Update API to look at one folder and return processor
Python
bsd-2-clause
pvtodorov/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,bgyori/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements Update API to look at one folder and return processor
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
<commit_before>""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements <commit_msg>Update API to look at one folder and return processor<commit_after>
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements Update API to look at one folder and return processor""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
<commit_before>""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements <commit_msg>Update API to look at one folder and return processor<commit_after>""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
69aa0ec7c79139167e7a2adce1e0effac960755a
flaskrst/__init__.py
flaskrst/__init__.py
# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        kwargs = item.copy()
        del kwargs['route']
        del kwargs['name']
        link = url_for(item['route'], **kwargs)
        navigation.append((link, item['name']))
    return dict(navigation=navigation)

# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        if item.has_key('route') and item.has_key('label'):
            kwargs = item.copy()
            del kwargs['route']
            del kwargs['label']
            link = url_for(item['route'], **kwargs)
            navigation.append((link, item['label']))
        elif item.has_key('url') and item.has_key('label'):
            navigation.append((item['url'], item['label']))
    return dict(navigation=navigation)
Rename navigation config key name to label and add support for links to external sites over the url key name
Rename navigation config key name to label and add support for links to external sites over the url key name
Python
bsd-3-clause
jarus/flask-rst
# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        kwargs = item.copy()
        del kwargs['route']
        del kwargs['name']
        link = url_for(item['route'], **kwargs)
        navigation.append((link, item['name']))
    return dict(navigation=navigation)Rename navigation config key name to label and add support for links to external sites over the url key name

# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        if item.has_key('route') and item.has_key('label'):
            kwargs = item.copy()
            del kwargs['route']
            del kwargs['label']
            link = url_for(item['route'], **kwargs)
            navigation.append((link, item['label']))
        elif item.has_key('url') and item.has_key('label'):
            navigation.append((item['url'], item['label']))
    return dict(navigation=navigation)

<commit_before># -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        kwargs = item.copy()
        del kwargs['route']
        del kwargs['name']
        link = url_for(item['route'], **kwargs)
        navigation.append((link, item['name']))
    return dict(navigation=navigation)<commit_msg>Rename navigation config key name to label and add support for links to external sites over the url key name<commit_after>

# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        if item.has_key('route') and item.has_key('label'):
            kwargs = item.copy()
            del kwargs['route']
            del kwargs['label']
            link = url_for(item['route'], **kwargs)
            navigation.append((link, item['label']))
        elif item.has_key('url') and item.has_key('label'):
            navigation.append((item['url'], item['label']))
    return dict(navigation=navigation)

# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        kwargs = item.copy()
        del kwargs['route']
        del kwargs['name']
        link = url_for(item['route'], **kwargs)
        navigation.append((link, item['name']))
    return dict(navigation=navigation)Rename navigation config key name to label and add support for links to external sites over the url key name# -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        if item.has_key('route') and item.has_key('label'):
            kwargs = item.copy()
            del kwargs['route']
            del kwargs['label']
            link = url_for(item['route'], **kwargs)
            navigation.append((link, item['label']))
        elif item.has_key('url') and item.has_key('label'):
            navigation.append((item['url'], item['label']))
    return dict(navigation=navigation)

<commit_before># -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        kwargs = item.copy()
        del kwargs['route']
        del kwargs['name']
        link = url_for(item['route'], **kwargs)
        navigation.append((link, item['name']))
    return dict(navigation=navigation)<commit_msg>Rename navigation config key name to label and add support for links to external sites over the url key name<commit_after># -*- coding: utf-8 -*-
"""
    flask-rstblog
    ~~~~~~~~~~~~~

    :copyright: (c) 2011 by Christoph Heer.
    :license: BSD, see LICENSE for more details.
"""

from flask import Flask, url_for

app = Flask("flaskrst")

@app.context_processor
def inject_navigation():
    navigation = []
    for item in app.config.get('NAVIGATION', []):
        if item.has_key('route') and item.has_key('label'):
            kwargs = item.copy()
            del kwargs['route']
            del kwargs['label']
            link = url_for(item['route'], **kwargs)
            navigation.append((link, item['label']))
        elif item.has_key('url') and item.has_key('label'):
            navigation.append((item['url'], item['label']))
    return dict(navigation=navigation)
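Editorial aside, not part of the record above: the post-commit context processor accepts two item shapes, keyed on 'route'/'label' versus 'url'/'label'. A hypothetical NAVIGATION setting exercising both branches (endpoint name and URL invented; the repo URL is taken from the record's repos field):

NAVIGATION = [
    {'route': 'index', 'label': 'Home'},  # resolved through url_for, extra keys become view args
    {'url': 'https://github.com/jarus/flask-rst', 'label': 'Source'},  # passed through as an external link
]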
5c21f105057f8c5d10721b6de2c5cf698668fd3c
src/events/admin.py
src/events/admin.py
from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}

from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
    raw_id_fields = ['host']
Make host field raw ID instead of select
Make host field raw ID instead of select
Python
mit
pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016
from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
Make host field raw ID instead of select

from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
    raw_id_fields = ['host']

<commit_before>from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
<commit_msg>Make host field raw ID instead of select<commit_after>

from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
    raw_id_fields = ['host']

from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
Make host field raw ID instead of selectfrom django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
    raw_id_fields = ['host']

<commit_before>from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
<commit_msg>Make host field raw ID instead of select<commit_after>from django.contrib import admin

from .models import SponsoredEvent


@admin.register(SponsoredEvent)
class SponsoredEventAdmin(admin.ModelAdmin):
    fields = [
        'host', 'title', 'slug', 'category', 'language',
        'abstract', 'python_level', 'detailed_description',
        'recording_policy', 'slide_link',
    ]
    search_fields = ['title', 'abstract']
    list_display = ['title', 'category', 'language', 'python_level']
    list_filter = ['category', 'language', 'python_level']
    prepopulated_fields = {'slug': ['title']}
    raw_id_fields = ['host']
74e8bf6574ce3658e1b276479c3b6ebec36844a4
kuhn_poker/agents/kuhn_random_agent.py
kuhn_poker/agents/kuhn_random_agent.py
import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        print('%s: %s %s' % (
            match_state.get_viewing_player(),
            self.is_fold_valid(),
            self.is_raise_valid()
        ))

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())

import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
Remove unnecessary log from random agent
Remove unnecessary log from random agent
Python
mit
JakubPetriska/poker-cfr,JakubPetriska/poker-cfr
import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        print('%s: %s %s' % (
            match_state.get_viewing_player(),
            self.is_fold_valid(),
            self.is_raise_valid()
        ))

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
Remove unnecessary log from random agent

import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())

<commit_before>import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        print('%s: %s %s' % (
            match_state.get_viewing_player(),
            self.is_fold_valid(),
            self.is_raise_valid()
        ))

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
<commit_msg>Remove unnecessary log from random agent<commit_after>

import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())

import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        print('%s: %s %s' % (
            match_state.get_viewing_player(),
            self.is_fold_valid(),
            self.is_raise_valid()
        ))

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
Remove unnecessary log from random agentimport random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())

<commit_before>import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        print('%s: %s %s' % (
            match_state.get_viewing_player(),
            self.is_fold_valid(),
            self.is_raise_valid()
        ))

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
<commit_msg>Remove unnecessary log from random agent<commit_after>import random
import sys

import acpc_python_client as acpc


class KuhnRandomAgent(acpc.Agent):
    def __init__(self):
        super().__init__()

    def on_game_start(self, game):
        pass

    def on_next_turn(self, game, match_state, is_acting_player):
        if not is_acting_player:
            return

        # Select between passing (fold or initial call)
        # or betting (raising or calling a bet)
        selected_action = random.randrange(2)
        if selected_action == 0 and self.is_fold_valid():
            self.set_next_action(acpc.ActionType.FOLD)
        elif selected_action == 1 and self.is_raise_valid():
            self.set_next_action(acpc.ActionType.RAISE)
        else:
            self.set_next_action(acpc.ActionType.CALL)

    def on_game_finished(self, game, match_state):
        pass


if __name__ == "__main__":
    if len(sys.argv) < 4:
        print("Usage {game_file_path} {dealer_hostname} {dealer_port}")
        sys.exit(1)

    client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3])
    client.play(KuhnRandomAgent())
fe76abc03f7152f318712e1a233aad42f2e9870a
jsonfield/widgets.py
jsonfield/widgets.py
from django import forms
from django.utils import simplejson as json

import staticmedia


class JSONWidget(forms.Textarea):
    def render(self, name, value, attrs=None):
        if value is None:
            value = ""
        if not isinstance(value, basestring):
            value = json.dumps(value, indent=2)
        return super(JSONWidget, self).render(name, value, attrs)

class JSONSelectWidget(forms.SelectMultiple):
    pass

class JSONTableWidget(JSONWidget):
    class Media:
        js = (
            staticmedia.url('js/jquery.js'),
            staticmedia.url('js/jquery.tmpl.js'),
            staticmedia.url('js/json-table.js'),
            staticmedia.url('js/json-table-templates.js'),
        )

from django import forms
from django.utils import simplejson as json

from django.conf import settings


class JSONWidget(forms.Textarea):
    def render(self, name, value, attrs=None):
        if value is None:
            value = ""
        if not isinstance(value, basestring):
            value = json.dumps(value, indent=2)
        return super(JSONWidget, self).render(name, value, attrs)

class JSONSelectWidget(forms.SelectMultiple):
    pass

class JSONTableWidget(JSONWidget):
    class Media:
        js = (
            settings.STATICFILES_URL + 'js/jquery.js',
            settings.STATICFILES_URL + 'js/jquery.tmpl.js',
            settings.STATICFILES_URL + 'js/json-table.js',
            settings.STATICFILES_URL + 'js/json-table-templates.js',
        )
Use staticfiles instead of staticmedia
Use staticfiles instead of staticmedia
Python
bsd-3-clause
SideStudios/django-jsonfield,chrismeyersfsu/django-jsonfield
from django import forms from django.utils import simplejson as json import staticmedia class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( staticmedia.url('js/jquery.js'), staticmedia.url('js/jquery.tmpl.js'), staticmedia.url('js/json-table.js'), staticmedia.url('js/json-table-templates.js'), )Use staticfiles instead of staticmedia
from django import forms from django.utils import simplejson as json from django.conf import settings class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( settings.STATICFILES_URL + 'js/jquery.js', settings.STATICFILES_URL + 'js/jquery.tmpl.js', settings.STATICFILES_URL + 'js/json-table.js', settings.STATICFILES_URL + 'js/json-table-templates.js', )
<commit_before>from django import forms from django.utils import simplejson as json import staticmedia class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( staticmedia.url('js/jquery.js'), staticmedia.url('js/jquery.tmpl.js'), staticmedia.url('js/json-table.js'), staticmedia.url('js/json-table-templates.js'), )<commit_msg>Use staticfiles instead of staticmedia<commit_after>
from django import forms from django.utils import simplejson as json from django.conf import settings class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( settings.STATICFILES_URL + 'js/jquery.js', settings.STATICFILES_URL + 'js/jquery.tmpl.js', settings.STATICFILES_URL + 'js/json-table.js', settings.STATICFILES_URL + 'js/json-table-templates.js', )
from django import forms from django.utils import simplejson as json import staticmedia class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( staticmedia.url('js/jquery.js'), staticmedia.url('js/jquery.tmpl.js'), staticmedia.url('js/json-table.js'), staticmedia.url('js/json-table-templates.js'), )Use staticfiles instead of staticmediafrom django import forms from django.utils import simplejson as json from django.conf import settings class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( settings.STATICFILES_URL + 'js/jquery.js', settings.STATICFILES_URL + 'js/jquery.tmpl.js', settings.STATICFILES_URL + 'js/json-table.js', settings.STATICFILES_URL + 'js/json-table-templates.js', )
<commit_before>from django import forms from django.utils import simplejson as json import staticmedia class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( staticmedia.url('js/jquery.js'), staticmedia.url('js/jquery.tmpl.js'), staticmedia.url('js/json-table.js'), staticmedia.url('js/json-table-templates.js'), )<commit_msg>Use staticfiles instead of staticmedia<commit_after>from django import forms from django.utils import simplejson as json from django.conf import settings class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( settings.STATICFILES_URL + 'js/jquery.js', settings.STATICFILES_URL + 'js/jquery.tmpl.js', settings.STATICFILES_URL + 'js/json-table.js', settings.STATICFILES_URL + 'js/json-table-templates.js', )
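The commit above swaps a third-party staticmedia helper for manual concatenation with STATICFILES_URL, a setting from the early Django release this repo targets. On current Django the same widget assets are usually declared as relative paths, which forms.Media resolves against STATIC_URL at render time; a minimal sketch under that assumption (the file names are illustrative):

from django import forms

class JSONTableWidget(forms.Textarea):
    class Media:
        # paths without a scheme or leading slash are prefixed with
        # STATIC_URL when the widget's media is rendered
        js = ('js/jquery.js', 'js/json-table.js')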
c13208dcc4fe1715db10d86e4dfd584c18f396fa
sympy/calculus/singularities.py
sympy/calculus/singularities.py
from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym)))
from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
Replace solve with solveset in sympy.calculus
Replace solve with solveset in sympy.calculus
Python
bsd-3-clause
skidzo/sympy,chaffra/sympy,pandeyadarsh/sympy,VaibhavAgarwalVA/sympy,abhiii5459/sympy,jbbskinny/sympy,aktech/sympy,lindsayad/sympy,kevalds51/sympy,Titan-C/sympy,hargup/sympy,yukoba/sympy,farhaanbukhsh/sympy,moble/sympy,emon10005/sympy,bukzor/sympy,sahmed95/sympy,mafiya69/sympy,kaushik94/sympy,VaibhavAgarwalVA/sympy,jbbskinny/sympy,cswiercz/sympy,moble/sympy,lindsayad/sympy,Titan-C/sympy,bukzor/sympy,jaimahajan1997/sympy,abhiii5459/sympy,sampadsaha5/sympy,saurabhjn76/sympy,yukoba/sympy,aktech/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,wyom/sympy,postvakje/sympy,iamutkarshtiwari/sympy,wyom/sympy,yashsharan/sympy,ga7g08/sympy,debugger22/sympy,ahhda/sympy,grevutiu-gabriel/sympy,oliverlee/sympy,chaffra/sympy,Designist/sympy,yashsharan/sympy,Davidjohnwilson/sympy,drufat/sympy,skidzo/sympy,oliverlee/sympy,jerli/sympy,AkademieOlympia/sympy,debugger22/sympy,kaushik94/sympy,Shaswat27/sympy,VaibhavAgarwalVA/sympy,ChristinaZografou/sympy,iamutkarshtiwari/sympy,wyom/sympy,ahhda/sympy,mcdaniel67/sympy,atreyv/sympy,kaichogami/sympy,Davidjohnwilson/sympy,abhiii5459/sympy,postvakje/sympy,Arafatk/sympy,Titan-C/sympy,pandeyadarsh/sympy,cswiercz/sympy,wanglongqi/sympy,Shaswat27/sympy,drufat/sympy,maniteja123/sympy,rahuldan/sympy,jerli/sympy,Arafatk/sympy,iamutkarshtiwari/sympy,MechCoder/sympy,farhaanbukhsh/sympy,Arafatk/sympy,madan96/sympy,souravsingh/sympy,chaffra/sympy,wanglongqi/sympy,ChristinaZografou/sympy,madan96/sympy,shikil/sympy,Designist/sympy,sahmed95/sympy,ahhda/sympy,jbbskinny/sympy,Curious72/sympy,souravsingh/sympy,postvakje/sympy,lindsayad/sympy,kevalds51/sympy,sampadsaha5/sympy,jaimahajan1997/sympy,rahuldan/sympy,mcdaniel67/sympy,yukoba/sympy,kumarkrishna/sympy,souravsingh/sympy,MechCoder/sympy,ga7g08/sympy,bukzor/sympy,oliverlee/sympy,Curious72/sympy,grevutiu-gabriel/sympy,MechCoder/sympy,sahmed95/sympy,Vishluck/sympy,shikil/sympy,atreyv/sympy,emon10005/sympy,mafiya69/sympy,kaushik94/sympy,pandeyadarsh/sympy,Curious72/sympy,saurabhjn76/sympy,kaichogami/sympy,cswiercz/sympy,aktech/sympy,sampadsaha5/sympy,maniteja123/sympy,atreyv/sympy,ga7g08/sympy,Davidjohnwilson/sympy,shikil/sympy,moble/sympy,AkademieOlympia/sympy,kaichogami/sympy,skidzo/sympy,emon10005/sympy,grevutiu-gabriel/sympy,Vishluck/sympy,hargup/sympy,debugger22/sympy,wanglongqi/sympy,Shaswat27/sympy,Vishluck/sympy,kevalds51/sympy,AkademieOlympia/sympy,farhaanbukhsh/sympy,Designist/sympy,rahuldan/sympy,maniteja123/sympy,kumarkrishna/sympy,jaimahajan1997/sympy,yashsharan/sympy,kumarkrishna/sympy,jerli/sympy,madan96/sympy,mafiya69/sympy,saurabhjn76/sympy,drufat/sympy,hargup/sympy
from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym))) Replace solve with solveset in sympy.calculus
from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
<commit_before>from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym))) <commit_msg>Replace solve with solveset in sympy.calculus<commit_after>
from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym))) Replace solve with solveset in sympy.calculusfrom sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
<commit_before>from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym))) <commit_msg>Replace solve with solveset in sympy.calculus<commit_after>from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
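The substitution above changes the return type: solve() gives a list, while solveset() returns a SymPy set object, so the surrounding sorted() call now iterates a FiniteSet (or an EmptySet when there are no solutions). A quick check of that behavior:

from sympy import S, Symbol, solveset

x = Symbol('x', real=True)
print(solveset(1/(x + 1), x, domain=S.Reals))         # EmptySet: 1/(x + 1) has no zeros
print(sorted(solveset(x**2 - 1, x, domain=S.Reals)))  # [-1, 1]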
b71f3c726aa6bde4ab0e2b471c5cb9064abfb3fa
apps/webdriver_testing/api_v2/test_user_resources.py
apps/webdriver_testing/api_v2/test_user_resources.py
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username']
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
Fix webdriver user creation bug
Fix webdriver user creation bug
Python
agpl-3.0
eloquence/unisubs,ofer43211/unisubs,eloquence/unisubs,eloquence/unisubs,pculture/unisubs,norayr/unisubs,pculture/unisubs,norayr/unisubs,wevoice/wesub,pculture/unisubs,ujdhesa/unisubs,ofer43211/unisubs,ujdhesa/unisubs,pculture/unisubs,ReachingOut/unisubs,ofer43211/unisubs,wevoice/wesub,ujdhesa/unisubs,wevoice/wesub,ReachingOut/unisubs,wevoice/wesub,ofer43211/unisubs,ReachingOut/unisubs,eloquence/unisubs,norayr/unisubs,ReachingOut/unisubs,norayr/unisubs,ujdhesa/unisubs
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username'] Fix webdriver user creation bug
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
<commit_before>from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username'] <commit_msg>Fix webdriver user creation bug<commit_after>
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username'] Fix webdriver user creation bugfrom apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
<commit_before>from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username'] <commit_msg>Fix webdriver user creation bug<commit_after>from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
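The one-character fix above ('users' to 'users/') matters because Django's APPEND_SLASH behavior answers a slash-less URL with a redirect, and a redirected POST does not keep its body. A hedged illustration outside the test harness (the host name is hypothetical and data_helpers is not reproduced here):

import requests

resp = requests.post(
    'http://example.com/api2/partners/users',  # missing trailing slash
    data={'username': 'newuser'},
    allow_redirects=False,
)
print(resp.status_code)  # a Django server with APPEND_SLASH typically answers 301 here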
fff33f238d840b89350d50e2349af8f60f298a2a
openprescribing/openprescribing/settings/test.py
openprescribing/openprescribing/settings/test.py
from __future__ import absolute_import import os from .local import * EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '../log/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
from __future__ import absolute_import import os from .base import * DEBUG = True TEMPLATES[0]['OPTIONS']['debug'] = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': utils.get_env_setting('DB_NAME'), 'USER': utils.get_env_setting('DB_USER'), 'PASSWORD': utils.get_env_setting('DB_PASS'), 'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1') } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } INTERNAL_IPS = ('127.0.0.1',) ANYMAIL = { "MAILGUN_API_KEY": "key-b503fcc6f1c029088f2b3f9b3faa303c", "MAILGUN_SENDER_DOMAIN": "staging.openprescribing.net", "WEBHOOK_AUTHORIZATION": "%s" % utils.get_env_setting( 'MAILGUN_WEBHOOK_AUTH_STRING', 'example:foo'), } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '/tmp/asdog/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
Test settings which don't depend on local ones
Test settings which don't depend on local ones By overriding local settings we were cargo-culting things we didn't want. Prefer explicit settings (still using the `base.py` environment as a starting point)
Python
mit
annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc
from __future__ import absolute_import import os from .local import * EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '../log/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2' Test settings which don't depend on local ones By overriding local settings we were cargo-culting things we didn't want. Prefer explicit settings (still using the `base.py` environment as a starting point)
from __future__ import absolute_import import os from .base import * DEBUG = True TEMPLATES[0]['OPTIONS']['debug'] = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': utils.get_env_setting('DB_NAME'), 'USER': utils.get_env_setting('DB_USER'), 'PASSWORD': utils.get_env_setting('DB_PASS'), 'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1') } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } INTERNAL_IPS = ('127.0.0.1',) ANYMAIL = { "MAILGUN_API_KEY": "key-b503fcc6f1c029088f2b3f9b3faa303c", "MAILGUN_SENDER_DOMAIN": "staging.openprescribing.net", "WEBHOOK_AUTHORIZATION": "%s" % utils.get_env_setting( 'MAILGUN_WEBHOOK_AUTH_STRING', 'example:foo'), } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '/tmp/asdog/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
<commit_before>from __future__ import absolute_import import os from .local import * EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '../log/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2' <commit_msg>Test settings which don't depend on local ones By overriding local settings we were cargo-culting things we didn't want. Prefer explicit settings (still using the `base.py` environment as a starting point)<commit_after>
from __future__ import absolute_import import os from .base import * DEBUG = True TEMPLATES[0]['OPTIONS']['debug'] = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': utils.get_env_setting('DB_NAME'), 'USER': utils.get_env_setting('DB_USER'), 'PASSWORD': utils.get_env_setting('DB_PASS'), 'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1') } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } INTERNAL_IPS = ('127.0.0.1',) ANYMAIL = { "MAILGUN_API_KEY": "key-b503fcc6f1c029088f2b3f9b3faa303c", "MAILGUN_SENDER_DOMAIN": "staging.openprescribing.net", "WEBHOOK_AUTHORIZATION": "%s" % utils.get_env_setting( 'MAILGUN_WEBHOOK_AUTH_STRING', 'example:foo'), } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '/tmp/asdog/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
from __future__ import absolute_import import os from .local import * EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '../log/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2' Test settings which don't depend on local ones By overriding local settings we were cargo-culting things we didn't want. Prefer explicit settings (still using the `base.py` environment as a starting point)from __future__ import absolute_import import os from .base import * DEBUG = True TEMPLATES[0]['OPTIONS']['debug'] = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': utils.get_env_setting('DB_NAME'), 'USER': utils.get_env_setting('DB_USER'), 'PASSWORD': utils.get_env_setting('DB_PASS'), 'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1') } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } INTERNAL_IPS = ('127.0.0.1',) ANYMAIL = { "MAILGUN_API_KEY": "key-b503fcc6f1c029088f2b3f9b3faa303c", "MAILGUN_SENDER_DOMAIN": "staging.openprescribing.net", "WEBHOOK_AUTHORIZATION": "%s" % utils.get_env_setting( 'MAILGUN_WEBHOOK_AUTH_STRING', 'example:foo'), } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '/tmp/asdog/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
<commit_before>from __future__ import absolute_import import os from .local import * EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '../log/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2' <commit_msg>Test settings which don't depend on local ones By overriding local settings we were cargo-culting things we didn't want. Prefer explicit settings (still using the `base.py` environment as a starting point)<commit_after>from __future__ import absolute_import import os from .base import * DEBUG = True TEMPLATES[0]['OPTIONS']['debug'] = DEBUG DATABASES = { 'default': { 'ENGINE': 'django.contrib.gis.db.backends.postgis', 'NAME': utils.get_env_setting('DB_NAME'), 'USER': utils.get_env_setting('DB_USER'), 'PASSWORD': utils.get_env_setting('DB_PASS'), 'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1') } } CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', } } INTERNAL_IPS = ('127.0.0.1',) ANYMAIL = { "MAILGUN_API_KEY": "key-b503fcc6f1c029088f2b3f9b3faa303c", "MAILGUN_SENDER_DOMAIN": "staging.openprescribing.net", "WEBHOOK_AUTHORIZATION": "%s" % utils.get_env_setting( 'MAILGUN_WEBHOOK_AUTH_STRING', 'example:foo'), } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' if 'TRAVIS' not in os.environ: LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'handlers': { 'file': { 'level': 'DEBUG', 'class': 'logging.FileHandler', 'filename': '/tmp/asdog/test-debug.log', }, }, 'loggers': { 'django': { 'handlers': ['file'], 'level': 'DEBUG', 'propagate': True, }, }, } # For grabbing images that we insert into alert emails GRAB_HOST = "http://localhost" # This is the same as the dev/local one GOOGLE_TRACKING_ID = 'UA-62480003-2'
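The rewritten settings module above leans on a get_env_setting helper that takes an optional default, e.g. utils.get_env_setting('DB_HOST', '127.0.0.1'). The repo's utils module is not shown in this record, but a minimal implementation consistent with that call shape would be:

import os

from django.core.exceptions import ImproperlyConfigured

def get_env_setting(name, default=None):
    """Return the environment variable, the given default, or fail loudly."""
    try:
        return os.environ[name]
    except KeyError:
        if default is not None:
            return default
        raise ImproperlyConfigured('Set the %s environment variable' % name)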
236ad637e05ab8ff48b7c169dd54228e48470e1b
mediacloud/mediawords/util/test_sql.py
mediacloud/mediawords/util/test_sql.py
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
Add some more unit tests for get_sql_date_from_epoch()
Add some more unit tests for get_sql_date_from_epoch()
Python
agpl-3.0
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14' Add some more unit tests for get_sql_date_from_epoch()
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
<commit_before>from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14' <commit_msg>Add some more unit tests for get_sql_date_from_epoch()<commit_after>
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14' Add some more unit tests for get_sql_date_from_epoch()from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
<commit_before>from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14' <commit_msg>Add some more unit tests for get_sql_date_from_epoch()<commit_after>from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
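The new assertion above feeds a non-numeric value ('badger') and still expects the epoch-0 date, which implies the function under test coerces its argument and falls back to 0 on failure. One implementation consistent with these tests (the real mediawords version may differ):

import datetime

def get_sql_date_from_epoch(epoch):
    try:
        epoch = int(epoch)
    except (TypeError, ValueError):
        epoch = 0  # fall back to the Unix epoch on bad input
    return datetime.datetime.fromtimestamp(epoch).strftime('%Y-%m-%d %H:%M:%S')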
d2991a6385be74debf71eb8404e362c6027e6d50
molecule/default/tests/test_default.py
molecule/default/tests/test_default.py
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
Remove redundant xorg command test
Remove redundant xorg command test
Python
mit
nephelaiio/ansible-role-i3,nephelaiio/ansible-role-i3
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0 Remove redundant xorg command test
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
<commit_before>import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0 <commit_msg>Remove redundant xorg command test<commit_after>
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0 Remove redundant xorg command testimport os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
<commit_before>import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0 <commit_msg>Remove redundant xorg command test<commit_after>import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
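For reference, testinfra's host.command() returns a result object carrying the exit status and captured output, so checks like the ones above can also inspect stdout when a plain rc test is too coarse. A small example (the expected substring is an assumption):

def test_i3_version(host):
    i3 = host.command('i3 --version')
    assert i3.rc == 0
    assert 'i3' in i3.stdout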
50e62304276c1daa2d0ef03b094f4c444ff995e1
openassessment/workflow/serializers.py
openassessment/workflow/serializers.py
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.Field(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.ReadOnlyField(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
Fix display of page after score override.
Fix display of page after score override.
Python
agpl-3.0
Stanford-Online/edx-ora2,Stanford-Online/edx-ora2,Stanford-Online/edx-ora2,Stanford-Online/edx-ora2
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.Field(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', ) Fix display of page after score override.
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.ReadOnlyField(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
<commit_before>""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.Field(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', ) <commit_msg>Fix display of page after score override.<commit_after>
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.ReadOnlyField(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.Field(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', ) Fix display of page after score override.""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.ReadOnlyField(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
<commit_before>""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.Field(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', ) <commit_msg>Fix display of page after score override.<commit_after>""" Serializers are created to ensure models do not have to be accessed outside the scope of the ORA2 APIs. """ from rest_framework import serializers from openassessment.workflow.models import AssessmentWorkflow, AssessmentWorkflowCancellation class AssessmentWorkflowSerializer(serializers.ModelSerializer): score = serializers.ReadOnlyField(required=False) override_score = serializers.ReadOnlyField(required=False) class Meta: model = AssessmentWorkflow fields = ( 'uuid', 'submission_uuid', 'status', 'created', 'modified', # Computed 'override_score', 'score' ) class AssessmentWorkflowCancellationSerializer(serializers.ModelSerializer): """ Serialize a `AssessmentWorkflowCancellation` model. """ class Meta: model = AssessmentWorkflowCancellation fields = ( 'comments', 'cancelled_by_id', 'created_at', )
325902c169424ec76307efa71a2e4885180e5cbb
tests/integration/shell/call.py
tests/integration/shell/call.py
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner, skipIf
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

    @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
    def test_user_delete_kw_output(self):
        ret = self.run_call('-d user.delete')
        self.assertIn(
            'salt \'*\' user.delete name remove=True force=True',
            ''.join(ret)
        )

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
Test to make sure we're outputting kwargs on the user.delete documentation.
Test to make sure we're outputting kwargs on the user.delete documentation.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
Test to make sure we're outputting kwargs on the user.delete documentation.
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner, skipIf
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

    @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
    def test_user_delete_kw_output(self):
        ret = self.run_call('-d user.delete')
        self.assertIn(
            'salt \'*\' user.delete name remove=True force=True',
            ''.join(ret)
        )

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
<commit_before># -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
<commit_msg>Test to make sure we're outputting kwargs on the user.delete documentation.<commit_after>
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner, skipIf
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

    @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
    def test_user_delete_kw_output(self):
        ret = self.run_call('-d user.delete')
        self.assertIn(
            'salt \'*\' user.delete name remove=True force=True',
            ''.join(ret)
        )

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
Test to make sure we're outputting kwargs on the user.delete documentation.
# -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner, skipIf
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

    @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
    def test_user_delete_kw_output(self):
        ret = self.run_call('-d user.delete')
        self.assertIn(
            'salt \'*\' user.delete name remove=True force=True',
            ''.join(ret)
        )

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
<commit_before># -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
<commit_msg>Test to make sure we're outputting kwargs on the user.delete documentation.<commit_after># -*- coding: utf-8 -*-
"""
    tests.integration.shell.call
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)`
    :license: Apache 2.0, see LICENSE for more details.
"""

import sys

# Import salt libs
from saltunittest import TestLoader, TextTestRunner, skipIf
import integration
from integration import TestDaemon

class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn):

    _call_binary_ = 'salt-call'

    def test_default_output(self):
        out = self.run_call('test.fib 3')
        self.assertEqual(
            "local: !!python/tuple\n- [0, 1, 1, 2]",
            '\n'.join(out[:-3])
        )

    def test_text_output(self):
        out = self.run_call('--text-out test.fib 3')
        self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0])

    @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win')
    def test_user_delete_kw_output(self):
        ret = self.run_call('-d user.delete')
        self.assertIn(
            'salt \'*\' user.delete name remove=True force=True',
            ''.join(ret)
        )

if __name__ == "__main__":
    loader = TestLoader()
    tests = loader.loadTestsFromTestCase(CallTest)
    print('Setting up Salt daemons to execute tests')
    with TestDaemon():
        runner = TextTestRunner(verbosity=1).run(tests)
        sys.exit(runner.wasSuccessful())
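The added test relies on a platform guard; a minimal self-contained sketch of the same skip pattern, using the standard library's unittest.skipIf rather than Salt's saltunittest wrapper (assumption: the two behave alike here):

import sys
import unittest

class PlatformGuardedTest(unittest.TestCase):

    @unittest.skipIf(sys.platform.startswith('win'), 'does not apply on Windows')
    def test_posix_only_behaviour(self):
        # Skipped (and reported as such) on Windows, executed elsewhere.
        self.assertTrue(True)

if __name__ == '__main__':
    unittest.main()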
981bac39056584ec9c16e5a8d0f7a972d7365a3f
tests/test_module_dispatcher.py
tests/test_module_dispatcher.py
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError):
        # The following allows us to introspect the exception object
        try:
            hosts.all.a_module_that_most_certainly_does_not_exist()
        except AnsibleModuleError, e:
            assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
            raise
        else:
            pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError) as exc_info:
        hosts.all.a_module_that_most_certainly_does_not_exist()
    assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
Use more preferred exc_info inspection technique
Use more preferred exc_info inspection technique
Python
mit
jlaska/pytest-ansible
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError):
        # The following allows us to introspect the exception object
        try:
            hosts.all.a_module_that_most_certainly_does_not_exist()
        except AnsibleModuleError, e:
            assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
            raise
        else:
            pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
Use more preferred exc_info inspection technique
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError) as exc_info:
        hosts.all.a_module_that_most_certainly_does_not_exist()
    assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
<commit_before>import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError):
        # The following allows us to introspect the exception object
        try:
            hosts.all.a_module_that_most_certainly_does_not_exist()
        except AnsibleModuleError, e:
            assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
            raise
        else:
            pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
<commit_msg>Use more preferred exc_info inspection technique<commit_after>
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError) as exc_info:
        hosts.all.a_module_that_most_certainly_does_not_exist()
    assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError):
        # The following allows us to introspect the exception object
        try:
            hosts.all.a_module_that_most_certainly_does_not_exist()
        except AnsibleModuleError, e:
            assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
            raise
        else:
            pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
Use more preferred exc_info inspection technique
import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError) as exc_info:
        hosts.all.a_module_that_most_certainly_does_not_exist()
    assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
<commit_before>import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError):
        # The following allows us to introspect the exception object
        try:
            hosts.all.a_module_that_most_certainly_does_not_exist()
        except AnsibleModuleError, e:
            assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
            raise
        else:
            pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
<commit_msg>Use more preferred exc_info inspection technique<commit_after>import pytest

from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS)

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_len(host_pattern, num_hosts, hosts):
    assert len(getattr(hosts, host_pattern)) == num_hosts

@pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS)
def test_contains(host_pattern, num_hosts, hosts):
    assert host_pattern in hosts.all
    assert host_pattern in hosts['all']

@pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS)
def test_not_contains(host_pattern, num_hosts, hosts):
    assert host_pattern not in hosts.all
    assert host_pattern not in hosts['all']

def test_ansible_module_error(hosts):
    '''Verify that AnsibleModuleError is raised when no such module exists.'''
    from pytest_ansible.errors import AnsibleModuleError
    with pytest.raises(AnsibleModuleError) as exc_info:
        hosts.all.a_module_that_most_certainly_does_not_exist()
    assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
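A minimal self-contained sketch of the exc_info idiom this change adopts, runnable under plain pytest (the boom function is hypothetical, not from the source):

import pytest

def boom():
    raise ValueError("bad input")

def test_boom_message():
    # pytest.raises used as a context manager both asserts the exception
    # type and captures it; exc_info.value is the raised instance.
    with pytest.raises(ValueError) as exc_info:
        boom()
    assert str(exc_info.value) == "bad input"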
9a1c34c7b3ff4de9dda7d8dbf6fb3234a40dc0b1
src/sas/sasview/__init__.py
src/sas/sasview/__init__.py
from distutils.version import StrictVersion

__version__ = "5.0.5a1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
from distutils.version import StrictVersion

__version__ = "5.0.5-alpha.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
Revert the version string fix, so the proper fix can be merged without conflict
Revert the version string fix, so the proper fix can be merged without conflict
Python
bsd-3-clause
SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview,SasView/sasview
from distutils.version import StrictVersion

__version__ = "5.0.5a1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
Revert the version string fix, so the proper fix can be merged without conflict
from distutils.version import StrictVersion

__version__ = "5.0.5-alpha.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
<commit_before>from distutils.version import StrictVersion

__version__ = "5.0.5a1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
<commit_msg>Revert the version string fix, so the proper fix can be merged without conflict<commit_after>
from distutils.version import StrictVersion

__version__ = "5.0.5-alpha.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
from distutils.version import StrictVersion

__version__ = "5.0.5a1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
Revert the version string fix, so the proper fix can be merged without conflict
from distutils.version import StrictVersion

__version__ = "5.0.5-alpha.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
<commit_before>from distutils.version import StrictVersion

__version__ = "5.0.5a1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
<commit_msg>Revert the version string fix, so the proper fix can be merged without conflict<commit_after>from distutils.version import StrictVersion

__version__ = "5.0.5-alpha.1"
StrictVersion(__version__)
__DOI__ = "Zenodo, DOI:10.5281/zenodo.4467703"
__release_date__ = "2021"
__build__ = "GIT_COMMIT"
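Worth noting: distutils' StrictVersion only accepts dotted numeric versions with an optional aN/bN pre-release tag, so the two strings in this diff parse very differently; a quick demonstration:

from distutils.version import StrictVersion

StrictVersion("5.0.5a1")  # parses: 'a1' is a valid StrictVersion pre-release tag
try:
    StrictVersion("5.0.5-alpha.1")  # the semver-style tag is rejected
except ValueError as err:
    print(err)  # invalid version number '5.0.5-alpha.1'

That the reverted string fails the very StrictVersion check on the next line is consistent with the commit message deferring to a later "proper fix".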
ba564c7e2cacc8609d52f03e501786be3c7c8f44
tests/config.py
tests/config.py
import sys
sys.path.append("../ideascaly")

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import ConfigParser
import unittest

config = ConfigParser.ConfigParser()
config.read('config')

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = config.get('test', 'community_url')

def create_auth():
    auth = AuthNonSSO(config.get('test', 'token'))
    return auth
import sys
sys.path.append('../ideascaly')

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import unittest

testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = testing_community

def create_auth():
    auth = AuthNonSSO(testing_token)
    return auth
Change the way the testing information is read
Change the way the testing information is read
Python
mit
joausaga/ideascaly
import sys
sys.path.append("../ideascaly")

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import ConfigParser
import unittest

config = ConfigParser.ConfigParser()
config.read('config')

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = config.get('test', 'community_url')

def create_auth():
    auth = AuthNonSSO(config.get('test', 'token'))
    return auth
Change the way the testing information is read
import sys
sys.path.append('../ideascaly')

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import unittest

testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = testing_community

def create_auth():
    auth = AuthNonSSO(testing_token)
    return auth
<commit_before>import sys
sys.path.append("../ideascaly")

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import ConfigParser
import unittest

config = ConfigParser.ConfigParser()
config.read('config')

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = config.get('test', 'community_url')

def create_auth():
    auth = AuthNonSSO(config.get('test', 'token'))
    return auth
<commit_msg>Change the way the testing information is read<commit_after>
import sys
sys.path.append('../ideascaly')

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import unittest

testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = testing_community

def create_auth():
    auth = AuthNonSSO(testing_token)
    return auth
import sys
sys.path.append("../ideascaly")

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import ConfigParser
import unittest

config = ConfigParser.ConfigParser()
config.read('config')

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = config.get('test', 'community_url')

def create_auth():
    auth = AuthNonSSO(config.get('test', 'token'))
    return auth
Change the way the testing information is read
import sys
sys.path.append('../ideascaly')

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import unittest

testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = testing_community

def create_auth():
    auth = AuthNonSSO(testing_token)
    return auth
<commit_before>import sys
sys.path.append("../ideascaly")

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import ConfigParser
import unittest

config = ConfigParser.ConfigParser()
config.read('config')

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = config.get('test', 'community_url')

def create_auth():
    auth = AuthNonSSO(config.get('test', 'token'))
    return auth
<commit_msg>Change the way the testing information is read<commit_after>import sys
sys.path.append('../ideascaly')

from ideascaly.auth import AuthNonSSO
from ideascaly.api import API

import unittest

testing_community = 'fiveheads.ideascale.com'
testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a'

class IdeascalyTestCase(unittest.TestCase):

    def setUp(self):
        self.auth = create_auth()
        self.api = API(self.auth)
        self.api.community_url = testing_community

def create_auth():
    auth = AuthNonSSO(testing_token)
    return auth
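The diff trades a ConfigParser lookup for hard-coded test credentials; a common middle ground, shown here only as a hypothetical alternative (variable names invented, not part of the source), is to read them from the environment with defaults:

import os

testing_community = os.environ.get('IDEASCALY_TEST_COMMUNITY', 'fiveheads.ideascale.com')
testing_token = os.environ.get('IDEASCALY_TEST_TOKEN', '')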
d07109e07e4d9fab488dfbbcf56fdfe18baa56ab
lib/python/plow/test/test_static.py
lib/python/plow/test/test_static.py
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.findJobs()

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.getJobs()

    def testGetGroupedJobs(self):
        result = [
            {"id": 1, "parent":0, "name": "High"},
            {"id": 2, "parent":1, "name": "Foo"}
        ]
        for p in result:
            print p

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
Set the column count value based on size of header list.
Set the column count value based on size of header list.
Python
apache-2.0
Br3nda/plow,Br3nda/plow,chadmv/plow,Br3nda/plow,chadmv/plow,chadmv/plow,Br3nda/plow,Br3nda/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.findJobs()

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
Set the column count value based on size of header list.
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.getJobs()

    def testGetGroupedJobs(self):
        result = [
            {"id": 1, "parent":0, "name": "High"},
            {"id": 2, "parent":1, "name": "Foo"}
        ]
        for p in result:
            print p

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
<commit_before>import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.findJobs()

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Set the column count value based on size of header list.<commit_after>
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.getJobs()

    def testGetGroupedJobs(self):
        result = [
            {"id": 1, "parent":0, "name": "High"},
            {"id": 2, "parent":1, "name": "Foo"}
        ]
        for p in result:
            print p

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.findJobs()

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
Set the column count value based on size of header list.
import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.getJobs()

    def testGetGroupedJobs(self):
        result = [
            {"id": 1, "parent":0, "name": "High"},
            {"id": 2, "parent":1, "name": "Foo"}
        ]
        for p in result:
            print p

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
<commit_before>import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.findJobs()

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Set the column count value based on size of header list.<commit_after>import unittest

import manifest
import plow

class StaticModuletests(unittest.TestCase):

    def testFindJobs(self):
        plow.getJobs()

    def testGetGroupedJobs(self):
        result = [
            {"id": 1, "parent":0, "name": "High"},
            {"id": 2, "parent":1, "name": "Foo"}
        ]
        for p in result:
            print p

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests)
    unittest.TextTestRunner(verbosity=2).run(suite)
189c7a7c982739cd7a3026e34a9969ea9278a12b
api/data/src/lib/middleware.py
api/data/src/lib/middleware.py
import os
import re

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT'):
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
import os

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
Fix so we can do :5000 queries from api container
Fix so we can do :5000 queries from api container
Python
mit
xeor/hohu,xeor/hohu,xeor/hohu,xeor/hohu
import os
import re

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT'):
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
Fix so we can do :5000 queries from api container
import os

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
<commit_before>import os
import re

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT'):
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
<commit_msg>Fix so we can do :5000 queries from api container<commit_after>
import os

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
import os
import re

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT'):
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
Fix so we can do :5000 queries from api container
import os

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
<commit_before>import os
import re

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT'):
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
<commit_msg>Fix so we can do :5000 queries from api container<commit_after>import os

class SetBaseEnv(object):
    """
    Figure out which port we are on if we are running and set it.
    So that the links will be correct.

    Not sure if we need this always...
    """
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']:
            request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT'])

        response = self.get_response(request)
        return response
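The added ':' check makes the host rewrite idempotent: without it, a Host header that already carries a port would gain a second one. A minimal sketch of the guard in isolation (helper name invented for illustration):

import os

def with_port(host, port=None):
    # Append the configured port only when the host has none yet.
    port = port or os.environ.get('HTTP_PORT')
    if port and ':' not in host:
        return '{}:{}'.format(host, port)
    return host

assert with_port('example.com', '5000') == 'example.com:5000'
assert with_port('example.com:5000', '5000') == 'example.com:5000'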
e0c70b2b20349b8f1c0f6df8cc641c3267a63a06
crypto.py
crypto.py
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
Use 'with ... as ...' for file opening. Standardize variable names.
Use 'with ... as ...' for file opening. Standardize variable names.
Python
mit
Tribler/decentral-market
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)Use 'with ... as ...' for file opening. Standardize variable names.
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
<commit_before>""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)<commit_msg>Use 'with ... as ...' for file opening. Standardize variable names.<commit_after>
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)Use 'with ... as ...' for file opening. Standardize variable names.""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
<commit_before>""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)<commit_msg>Use 'with ... as ...' for file opening. Standardize variable names.<commit_after>""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
ca0b10484ca92709be4b30b2aab7079a2f4a2fe1
tracker.py
tracker.py
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get Hashtag to track
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get String to track on
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
Change comments to make sense
Change comments to make sense
Python
mit
tim-thompson/TweetTimeTracker
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get Hashtag to track
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
Change comments to make sense
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get String to track on
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
<commit_before>import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get Hashtag to track
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
<commit_msg>Change comments to make sense<commit_after>
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get String to track on
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get Hashtag to track
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
Change comments to make sense
import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get String to track on
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
<commit_before>import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get Hashtag to track
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
<commit_msg>Change comments to make sense<commit_after>import sys
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import tweepy

#Get String to track on
argTag = sys.argv[1]

#Class for listening to all tweets
class TweetListener(StreamListener):

    def on_status(self, status):
        print status.created_at
        #Write timestamp to file
        f = open("logs/" + argTag + ".txt", "a")
        f.write(str(status.created_at) + "\n")
        f.close()
        return True

    def on_error(self, status):
        print status

if __name__ == '__main__':
    listener = TweetListener()

    #Keys
    CONSUMER_KEY = ''
    CONSUMER_SECRET = ''
    ACCESS_KEY = ''
    ACCESS_SECRET = ''

    #Initialise and Authorise
    auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
    auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
    stream = Stream(auth, listener)
    stream.filter(track = [argTag])
3b568b0343e1cbf9256caa181c672449faf01ddc
pavement.py
pavement.py
from paver.easy import * import paver.doctools from paver.setuputils import setup @task def dumpdb(): print "Downloading data from App Engine" sh("python2.5 ../../google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv") @task def restoredb(): print "Loading data to development datastore." sh("python2.5 ../../google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
from paver.easy import * import paver.doctools from paver.setuputils import setup @task def dumpdb(): print "Downloading data from App Engine" sh("python2.5 ../../lib/google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv") @task def restoredb(): print "Loading data to development datastore." sh("python2.5 ../../lib/google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
Adjust path to GAE SDK
Adjust path to GAE SDK
Python
bsd-3-clause
amarandon/zongo-engine,amarandon/zongo-engine,amarandon/zongo-engine
from paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
Adjust path to GAE SDK
from paver.easy import * import paver.doctools from paver.setuputils import setup @task def dumpdb(): print "Downloading data from App Engine" sh("python2.5 ../../lib/google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv") @task def restoredb(): print "Loading data to development datastore." sh("python2.5 ../../lib/google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
<commit_before>from paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
<commit_msg>Adjust path to GAE SDK<commit_after>
from paver.easy import * import paver.doctools from paver.setuputils import setup @task def dumpdb(): print "Downloading data from App Engine" sh("python2.5 ../../lib/google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv") @task def restoredb(): print "Loading data to development datastore." sh("python2.5 ../../lib/google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
from paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
Adjust path to GAE SDKfrom paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../lib/google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../lib/google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
<commit_before>from paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
<commit_msg>Adjust path to GAE SDK<commit_after>from paver.easy import *
import paver.doctools
from paver.setuputils import setup

@task
def dumpdb():
    print "Downloading data from App Engine"
    sh("python2.5 ../../lib/google_appengine/bulkloader.py --dump --app_id=zongo --email=alex.marandon@gmail.com --url=http://www.zongosound.com/remote_api --filename=data.csv")

@task
def restoredb():
    print "Loading data to development datastore."
    sh("python2.5 ../../lib/google_appengine/bulkloader.py --app_id=zongo --batch_size=1 --restore --url=http://localhost:8080/remote_api --email=admin --filename=data.csv")
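Both tasks in this record embed the same SDK location; a hedged sketch of keeping that path in one constant so the next SDK move touches a single line (the GAE_SDK name and bulkloader helper are illustrative, not from the repo):

import os
from paver.easy import task, sh

GAE_SDK = "../../lib/google_appengine"  # single place to adjust the SDK checkout

def bulkloader(args):
    # every task builds its command from the same SDK root
    sh("python2.5 %s %s" % (os.path.join(GAE_SDK, "bulkloader.py"), args))

@task
def dumpdb():
    bulkloader("--dump --app_id=zongo --url=http://www.zongosound.com/remote_api --filename=data.csv")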
f75e245f461e57cc868ee5452c88aea92b6681bf
chainer/functions/parameter.py
chainer/functions/parameter.py
import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
import numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
Add typecheck to Parameter function
Add typecheck to Parameter function
Python
mit
t-abe/chainer,chainer/chainer,pfnet/chainer,jnishi/chainer,sou81821/chainer,ikasumi/chainer,tigerneil/chainer,delta2323/chainer,1986ks/chainer,chainer/chainer,yanweifu/chainer,ronekko/chainer,muupan/chainer,truongdq/chainer,chainer/chainer,muupan/chainer,okuta/chainer,jnishi/chainer,anaruse/chainer,hvy/chainer,cupy/cupy,cupy/cupy,truongdq/chainer,ktnyt/chainer,keisuke-umezawa/chainer,jfsantos/chainer,ytoyama/yans_chainer_hackathon,masia02/chainer,niboshi/chainer,jnishi/chainer,kikusu/chainer,AlpacaDB/chainer,woodshop/chainer,keisuke-umezawa/chainer,jnishi/chainer,ktnyt/chainer,cupy/cupy,elviswf/chainer,kiyukuta/chainer,tkerola/chainer,Kaisuke5/chainer,keisuke-umezawa/chainer,rezoo/chainer,niboshi/chainer,kuwa32/chainer,kikusu/chainer,wavelets/chainer,benob/chainer,okuta/chainer,AlpacaDB/chainer,hidenori-t/chainer,okuta/chainer,woodshop/complex-chainer,wkentaro/chainer,t-abe/chainer,okuta/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,cemoody/chainer,ktnyt/chainer,benob/chainer,niboshi/chainer,wkentaro/chainer,laysakura/chainer,niboshi/chainer,tscohen/chainer,hvy/chainer,ysekky/chainer,cupy/cupy,hvy/chainer,aonotas/chainer,chainer/chainer,sinhrks/chainer,ktnyt/chainer,kashif/chainer,wkentaro/chainer,sinhrks/chainer,minhpqn/chainer,umitanuki/chainer
import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return () Add typecheck to Parameter function
import numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
<commit_before>import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return () <commit_msg>Add typecheck to Parameter function<commit_after>
import numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return () Add typecheck to Parameter functionimport numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
<commit_before>import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return () <commit_msg>Add typecheck to Parameter function<commit_after>import numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
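check_type_forward with a zero-size expectation is the degenerate case for a function that takes no input. For comparison, a sketch of the same hook on a hypothetical one-input function, using the chainer.utils.type_check API the record imports (the dtype and ndim constraints are assumptions for illustration):

import numpy
from chainer.utils import type_check

class OneInputFunction(object):  # stand-in for a chainer Function subclass
    def check_type_forward(self, in_types):
        # exactly one input array is expected
        type_check.expect(in_types.size() == 1)
        x_type = in_types[0]
        # it should be float32 and at least one-dimensional
        type_check.expect(
            x_type.dtype == numpy.float32,
            x_type.ndim >= 1,
        )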
3cf9473bdf1714460478b4cd36a54b09b2a57173
lib/feedeater/validate.py
lib/feedeater/validate.py
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator(validator, report=report) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def __init__(self, *args, **kwargs): super(FeedEaterValidate, self).__init__(*args, **kwargs) self.feedvalidator = kwargs.get('feedvalidator') def parser(self): parser = super(FeedEaterValidate, self).parser() parser.add_argument( '--feedvalidator', help='Path to feedvalidator.py' ) return parser def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator( validator, feedvalidator=self.feedvalidator, report=report, ) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
Add --feedvalidator option to validator
Add --feedvalidator option to validator
Python
mit
transitland/transitland-datastore,transitland/transitland-datastore,transitland/transitland-datastore,brechtvdv/transitland-datastore,brechtvdv/transitland-datastore,brechtvdv/transitland-datastore
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator(validator, report=report) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run() Add --feedvaldiator option to validator
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def __init__(self, *args, **kwargs): super(FeedEaterValidate, self).__init__(*args, **kwargs) self.feedvalidator = kwargs.get('feedvalidator') def parser(self): parser = super(FeedEaterValidate, self).parser() parser.add_argument( '--feedvalidator', help='Path to feedvalidator.py' ) return parser def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator( validator, feedvalidator=self.feedvalidator, report=report, ) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
<commit_before>"""Validate GTFS"""
import os
import mzgtfs.feed
import mzgtfs.validation
import task

class FeedEaterValidate(task.FeedEaterTask):
    def run(self):
        # Validate feeds
        self.log("===== Feed: %s ====="%self.feedid)
        feed = self.registry.feed(self.feedid)
        filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop())
        report = os.path.join(self.workdir, '%s.html'%feed.onestop())
        self.log("Validating: %s"%filename)
        gtfsfeed = mzgtfs.feed.Feed(filename)
        validator = mzgtfs.validation.ValidationReport()
        # gtfsfeed.validate(validator)
        gtfsfeed.validate_feedvalidator(validator, report=report)
        # validator.report()
        self.log("Validation report:")
        if not validator.exceptions:
            self.log("No errors")
        for e in validator.exceptions:
            self.log("%s: %s"%(e.source, e.message))

if __name__ == "__main__":
    task = FeedEaterValidate.from_args()
    task.run()
<commit_msg>Add --feedvalidator option to validator<commit_after>
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def __init__(self, *args, **kwargs): super(FeedEaterValidate, self).__init__(*args, **kwargs) self.feedvalidator = kwargs.get('feedvalidator') def parser(self): parser = super(FeedEaterValidate, self).parser() parser.add_argument( '--feedvalidator', help='Path to feedvalidator.py' ) return parser def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator( validator, feedvalidator=self.feedvalidator, report=report, ) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator(validator, report=report) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run() Add --feedvaldiator option to validator"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def __init__(self, *args, **kwargs): super(FeedEaterValidate, self).__init__(*args, **kwargs) self.feedvalidator = kwargs.get('feedvalidator') def parser(self): parser = super(FeedEaterValidate, self).parser() parser.add_argument( '--feedvalidator', help='Path to feedvalidator.py' ) return parser def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator( validator, feedvalidator=self.feedvalidator, report=report, ) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
<commit_before>"""Validate GTFS"""
import os
import mzgtfs.feed
import mzgtfs.validation
import task

class FeedEaterValidate(task.FeedEaterTask):
    def run(self):
        # Validate feeds
        self.log("===== Feed: %s ====="%self.feedid)
        feed = self.registry.feed(self.feedid)
        filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop())
        report = os.path.join(self.workdir, '%s.html'%feed.onestop())
        self.log("Validating: %s"%filename)
        gtfsfeed = mzgtfs.feed.Feed(filename)
        validator = mzgtfs.validation.ValidationReport()
        # gtfsfeed.validate(validator)
        gtfsfeed.validate_feedvalidator(validator, report=report)
        # validator.report()
        self.log("Validation report:")
        if not validator.exceptions:
            self.log("No errors")
        for e in validator.exceptions:
            self.log("%s: %s"%(e.source, e.message))

if __name__ == "__main__":
    task = FeedEaterValidate.from_args()
    task.run()
<commit_msg>Add --feedvalidator option to validator<commit_after>"""Validate GTFS"""
import os
import mzgtfs.feed
import mzgtfs.validation
import task

class FeedEaterValidate(task.FeedEaterTask):
    def __init__(self, *args, **kwargs):
        super(FeedEaterValidate, self).__init__(*args, **kwargs)
        self.feedvalidator = kwargs.get('feedvalidator')

    def parser(self):
        parser = super(FeedEaterValidate, self).parser()
        parser.add_argument(
            '--feedvalidator',
            help='Path to feedvalidator.py'
        )
        return parser

    def run(self):
        # Validate feeds
        self.log("===== Feed: %s ====="%self.feedid)
        feed = self.registry.feed(self.feedid)
        filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop())
        report = os.path.join(self.workdir, '%s.html'%feed.onestop())
        self.log("Validating: %s"%filename)
        gtfsfeed = mzgtfs.feed.Feed(filename)
        validator = mzgtfs.validation.ValidationReport()
        # gtfsfeed.validate(validator)
        gtfsfeed.validate_feedvalidator(
            validator,
            feedvalidator=self.feedvalidator,
            report=report,
        )
        # validator.report()
        self.log("Validation report:")
        if not validator.exceptions:
            self.log("No errors")
        for e in validator.exceptions:
            self.log("%s: %s"%(e.source, e.message))

if __name__ == "__main__":
    task = FeedEaterValidate.from_args()
    task.run()
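The parser()/kwargs pair in this record boils down to a standard optional argparse flag; a self-contained sketch (the path value is made up):

import argparse

parser = argparse.ArgumentParser(description='validate a GTFS feed')
# optional flag: argparse stores None when it is absent, which matches
# the kwargs.get('feedvalidator') default in the record
parser.add_argument('--feedvalidator', help='Path to feedvalidator.py')
args = parser.parse_args(['--feedvalidator', '/opt/feedvalidator.py'])
assert args.feedvalidator == '/opt/feedvalidator.py'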
d1174017c6b282aa1d808b784ffde8a3d3190472
fabfile.py
fabfile.py
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs()
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
Allow running tasks even if roledefs.pickle is missing
Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net>
Python
bsd-2-clause
mattock/fabric,mattock/fabric
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs() Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net>
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
<commit_before># -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs() <commit_msg>Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net><commit_after>
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs() Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net># -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
<commit_before># -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs() <commit_msg>Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net><commit_after># -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
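The try/open probe in this record works, but only the file's existence is of interest; a sketch of an equivalent guard that asks for exactly that, assuming the same hostinfo helper the fabfile imports:

import os
from fabric.api import env

# look before leaping: load role definitions only when the pickle exists
# (hostinfo comes from the package import in the fabfile above)
if os.path.exists("roledefs.pickle"):
    env.roledefs = hostinfo.load_roledefs()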
3f394e47841b2d9e49554b21c67b06a46f99f25c
celery_app.py
celery_app.py
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ]
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
Add corporation task in celery data
Add corporation task in celery data
Python
bsd-3-clause
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ] Add corporation task in celery data
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
<commit_before># -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ] <commit_msg>Add corporation task in celery data<commit_after>
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ] Add corporation task in celery data# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
<commit_before># -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ] <commit_msg>Add corporation task in celery data<commit_after># -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
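Every beat entry in this record pairs a registered task name with a crontab; a sketch of adding one more on the same pattern, assuming the celery_app object from the record (the task name and interval are hypothetical):

from celery.schedules import crontab

celery_app.conf.beat_schedule.update({
    'corporation-task-spawner': {
        'task': 'schedule.corporation_task_spawner',  # hypothetical task name
        'schedule': crontab(minute='*/15'),           # run every 15 minutes
    },
})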
35c47f00f914935bd886c0b28ef618f451abc3b3
local_settings_example.py
local_settings_example.py
DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = '/usr/local/www/LIIT/media' STATICFILES_DIRS = ( '/usr/local/www/LIIT/static', ) TEMPLATE_DIRS = ( '/usr/local/www/LIIT/templates', )
import os PROJECT_DIR = os.path.abspath(os.path.dirname(__file__)) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media') STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, 'templates'), )
Update settings example for tpl directories and other stuff
Update settings example for tpl directories and other stuff
Python
bsd-3-clause
RocknRoot/LIIT
DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = '/usr/local/www/LIIT/media' STATICFILES_DIRS = ( '/usr/local/www/LIIT/static', ) TEMPLATE_DIRS = ( '/usr/local/www/LIIT/templates', ) Update settings example for tpl directories and other stuff
import os PROJECT_DIR = os.path.abspath(os.path.dirname(__file__)) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media') STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, 'templates'), )
<commit_before>DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = '/usr/local/www/LIIT/media' STATICFILES_DIRS = ( '/usr/local/www/LIIT/static', ) TEMPLATE_DIRS = ( '/usr/local/www/LIIT/templates', ) <commit_msg>Update settings example for tpl directories and other stuff<commit_after>
import os PROJECT_DIR = os.path.abspath(os.path.dirname(__file__)) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media') STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, 'templates'), )
DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = '/usr/local/www/LIIT/media' STATICFILES_DIRS = ( '/usr/local/www/LIIT/static', ) TEMPLATE_DIRS = ( '/usr/local/www/LIIT/templates', ) Update settings example for tpl directories and other stuffimport os PROJECT_DIR = os.path.abspath(os.path.dirname(__file__)) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media') STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, 'templates'), )
<commit_before>DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = '/usr/local/www/LIIT/media' STATICFILES_DIRS = ( '/usr/local/www/LIIT/static', ) TEMPLATE_DIRS = ( '/usr/local/www/LIIT/templates', ) <commit_msg>Update settings example for tpl directories and other stuff<commit_after>import os PROJECT_DIR = os.path.abspath(os.path.dirname(__file__)) DEBUG = True TEMPLATE_DEBUG = DEBUG ADMINS = ( # ('Your Name', 'your_email@example.com'), ) DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': '', 'USER': '', 'PASSWORD': '', 'HOST': '', } } TIME_ZONE = 'Europe/Paris' LANGUAGE_CODE = 'en-us' # Make this unique, and don't share it with anybody. SECRET_KEY = 'DO-SOMETHING-FOR-FRAKS-SAKE' MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media') STATICFILES_DIRS = ( os.path.join(PROJECT_DIR, 'static'), ) TEMPLATE_DIRS = ( os.path.join(PROJECT_DIR, 'templates'), )
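The point of the change in this record is that paths anchor to the settings module rather than the process working directory; a short sketch of the idiom in isolation:

import os

# dirname(abspath(__file__)) is the directory of this settings file,
# so joined paths resolve the same way from any working directory
PROJECT_DIR = os.path.abspath(os.path.dirname(__file__))
MEDIA_ROOT = os.path.join(PROJECT_DIR, 'media')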
9d94a753c4824df210753996edaa9f7910df5fa8
tests/test_sample_app.py
tests/test_sample_app.py
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/')
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
Check for status code of 200 in sample app.
Check for status code of 200 in sample app.
Python
apache-2.0
JingZhou0404/flask-bootstrap,scorpiovn/flask-bootstrap,suvorom/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,suvorom/flask-bootstrap,victorbjorklund/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,livepy/flask-bootstrap,victorbjorklund/flask-bootstrap,dingocuster/flask-bootstrap,Coxious/flask-bootstrap,Coxious/flask-bootstrap,vishnugonela/flask-bootstrap,moha24/flask-bootstrap,eshijia/flask-bootstrap,dingocuster/flask-bootstrap,victorbjorklund/flask-bootstrap,eshijia/flask-bootstrap,vishnugonela/flask-bootstrap,JingZhou0404/flask-bootstrap,ser/flask-bootstrap,scorpiovn/flask-bootstrap,vishnugonela/flask-bootstrap,livepy/flask-bootstrap,JingZhou0404/flask-bootstrap,dingocuster/flask-bootstrap,livepy/flask-bootstrap,moha24/flask-bootstrap,Coxious/flask-bootstrap,BeardedSteve/flask-bootstrap,eshijia/flask-bootstrap,suvorom/flask-bootstrap,scorpiovn/flask-bootstrap,moha24/flask-bootstrap
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/') Check for status code of 200 in sample app.
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
<commit_before>import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/') <commit_msg>Check for status code of 200 in sample app.<commit_after>
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/') Check for status code of 200 in sample app.import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
<commit_before>import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/') <commit_msg>Check for status code of 200 in sample app.<commit_after>import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
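One detail worth knowing when reading this record: in Werkzeug, which backs Flask's test client, response.status is the status line as a string ('200 OK') while response.status_code is the integer, so an integer comparison belongs on status_code. A sketch reusing the record's client fixture:

def test_index(client):
    resp = client.get('/')
    assert resp.status_code == 200   # integer code
    assert resp.status == '200 OK'   # full status line, a string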
f48afc99a7e7aa076aa27b33deda824b5509bab2
test_qt_helpers_qt5.py
test_qt_helpers_qt5.py
from __future__ import absolute_import, division, print_function import os import sys import pytest from mock import MagicMock # At the moment it is not possible to have PyQt5 and PyQt4 installed # simultaneously because one requires the Qt4 libraries while the other # requires the Qt5 libraries class TestQT5(object): def setup_class(cls): print('-' * 72) os.environ['QT_API'] = 'pyqt5' import qt_helpers as qt def _load_qt5(self): import qt_helpers as qt def test_main_import_qt5(self): self._load_qt5() from qt_helpers import QtCore from qt_helpers import QtGui from PyQt5 import QtCore as core, QtGui as gui assert QtCore is core assert QtGui is gui def test_load_ui_qt5(self): self._load_qt5() from qt_helpers import load_ui, get_qapp qpp = get_qapp() load_ui('test.ui') def test_submodule_import_qt5(self): self._load_qt5() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PyQt5.QtWidgets import QMessageBox as qmb from PyQt5.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt def test_submodule_import_pyside(self): self._load_pyside() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PySide.QtGui import QMessageBox as qmb from PySide.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt
from __future__ import absolute_import, division, print_function import os import sys import pytest from mock import MagicMock # At the moment it is not possible to have PyQt5 and PyQt4 installed # simultaneously because one requires the Qt4 libraries while the other # requires the Qt5 libraries class TestQT5(object): def setup_class(cls): os.environ['QT_API'] = 'pyqt5' import qt_helpers as qt def _load_qt5(self): import qt_helpers as qt def test_main_import_qt5(self): self._load_qt5() from qt_helpers import QtCore from qt_helpers import QtGui from PyQt5 import QtCore as core, QtGui as gui assert QtCore is core assert QtGui is gui # At the moment, PyQt5 does not run correctly on Travis so we can't run # this without causing an Abort Trap. # def test_load_ui_qt5(self): # self._load_qt5() # from qt_helpers import load_ui, get_qapp # qpp = get_qapp() # load_ui('test.ui') def test_submodule_import_qt5(self): self._load_qt5() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PyQt5.QtWidgets import QMessageBox as qmb from PyQt5.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt
Comment out problematic test for now
Comment out problematic test for now
Python
bsd-3-clause
glue-viz/qt-helpers
15fe43d0be3c665c09c898864bd2815b39fbc8a5
toolbox/config/common.py
toolbox/config/common.py
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'

ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo']

DEFAULT_COMMAND_TIMEOUT = 60 * 60

CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
CURRENT_MIN_VERSION = 'v3.0'
CURRENT_MAX_VERSION = 'v3.1'

# Once the Next platform supports challenge versions this can be extended.
ACTIVE_REMOTE_BRANCHES = ['master']

DEFAULT_COMMAND_TIMEOUT = 60 * 60

CONTROLLER_PROTOCOL = 'controller'
PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL}
CRP_TYPES = {'docker', 'gce', 'static'}
Change v3 active branches to [master]
Change v3 active branches to [master]

Extend the list when it becomes relevant.
The old platform shall use the legacy branch.
Python
apache-2.0
avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox
c2060336b7d20a774ce9e5ae93960ad680836274
modoboa_radicale/forms.py
modoboa_radicale/forms.py
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.CharField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.URLField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
Change form field to URLField.
Change form field to URLField.

see #31
Python
mit
modoboa/modoboa-radicale,modoboa/modoboa-radicale,modoboa/modoboa-radicale
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.CharField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) ) Change form field to URLField. see #31
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.URLField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
<commit_before>"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.CharField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) ) <commit_msg>Change form field to URLField. see #31<commit_after>
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.URLField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.CharField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) ) Change form field to URLField. see #31"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.URLField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
<commit_before>"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.CharField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) ) <commit_msg>Change form field to URLField. see #31<commit_after>"""Radicale extension forms.""" from django import forms from django.utils.translation import ugettext_lazy from modoboa.lib import form_utils from modoboa.parameters import forms as param_forms class ParametersForm(param_forms.AdminParametersForm): """Global parameters.""" app = "modoboa_radicale" server_settings = form_utils.SeparatorField( label=ugettext_lazy("Server settings") ) server_location = forms.URLField( label=ugettext_lazy("Server URL"), help_text=ugettext_lazy( "The URL of your Radicale server. " "It will be used to construct calendar URLs." ), widget=forms.TextInput(attrs={"class": "form-control"}) ) rights_management_sep = form_utils.SeparatorField( label=ugettext_lazy("Rights management")) rights_file_path = forms.CharField( label=ugettext_lazy("Rights file's path"), initial="/etc/modoboa_radicale/rights", help_text=ugettext_lazy( "Path to the file that contains rights definition" ), widget=forms.TextInput(attrs={"class": "form-control"}) ) allow_calendars_administration = form_utils.YesNoField( label=ugettext_lazy("Allow calendars administration"), initial=False, help_text=ugettext_lazy( "Allow domain administrators to manage user calendars " "(read and write)" ) )
6bcc15b6d018560ebc368efcfc2c2c7d435c7dcc
strictify-coqdep.py
strictify-coqdep.py
#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
Switch from python2 to python3
Switch from python2 to python3

Closes #6
Python
mit
JasonGross/coq-scripts,JasonGross/coq-scripts
#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode) Switch from python2 to python3 Closes #6
#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
<commit_before>#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode) <commit_msg>Switch from python2 to python3 Closes #6<commit_after>
#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode) Switch from python2 to python3 Closes #6#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
<commit_before>#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode) <commit_msg>Switch from python2 to python3 Closes #6<commit_after>#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
0415bc9e4a174b7cebb634a449473131fe16b3b2
bulbs/content/management/commands/reindex_content.py
bulbs/content/management/commands/reindex_content.py
from django.core.management.base import NoArgsCommand

from bulbs.content.models import Content


class Command(NoArgsCommand):
    help = 'Runs Content.index on all content.'

    def handle(self, **options):
        num_processed = 0
        content_count = Content.objects.all().count()
        chunk_size = 10
        while num_processed < content_count:
            for content in Content.objects.all()[num_processed:num_processed + chunk_size]:
                content.index()
                num_processed += 1
                if not num_processed % 100:
                    print 'Processed %d content items' % num_processed
from django.core.management.base import NoArgsCommand

from bulbs.content.models import Content


class Command(NoArgsCommand):
    help = 'Runs Content.index on all content.'

    def handle(self, **options):
        num_processed = 0
        content_count = Content.objects.all().count()
        chunk_size = 10
        while num_processed < content_count:
            for content in Content.objects.all().order_by('id')[num_processed:num_processed + chunk_size]:
                content.index()
                num_processed += 1
                if not num_processed % 100:
                    print 'Processed %d content items' % num_processed
Add ordering to queryset in reindex admin command
Add ordering to queryset in reindex admin command
Python
mit
theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs
8247d3c1b6a9720f14db1130e087293c57587b54
weather.py
weather.py
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'http://api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
Fix URL scheme for OpenWeatherMap.
Fix URL scheme for OpenWeatherMap.
Python
mit
jrupac/nest-wfh
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']Fix URL scheme for OpenWeatherMap.
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'http://api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
<commit_before>""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']<commit_msg>Fix URL scheme for OpenWeatherMap.<commit_after>
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'http://api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']Fix URL scheme for OpenWeatherMap.""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'http://api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
<commit_before>""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']<commit_msg>Fix URL scheme for OpenWeatherMap.<commit_after>""" Implementation of the OpenWeatherMap API. """ __author__ = 'ajay@roopakalu.com (Ajay Roopakalu)' OPEN_WEATHER_MAP_URL = 'http://api.openweathermap.org/data/2.5' WEATHER_URL = '/weather' import requests import log logging = log.Log(__name__) def GetCurrentExternalTemperature(appid, latitude, longitude): params = { 'APPID': appid, 'units': 'imperial', 'lat': latitude, 'lon': longitude } response = requests.put( OPEN_WEATHER_MAP_URL + WEATHER_URL, params=params) if response.status_code != 200: logging.exception('Unexpected response: ', response.text) response_parsed = response.json() if 'main' not in response_parsed or 'temp' not in response_parsed['main']: logging.exception('Expected fields not in response: ', response.text) return response_parsed['main']['temp']
a9a55f87abc0a26d41e3fa3091f2f2efad7a2543
autoencoder/encode.py
autoencoder/encode.py
import numpy as np

from .network import autoencoder, get_encoder
from .io import read_records, load_model


def encode(input_file, output_file, log_dir):
    X = read_records(input_file)
    size = X.shape[1]

    model = load_model(log_dir)
    encoder = get_encoder(model)
    predictions = encoder.predict(X)
    np.savetxt(output_file, predictions)


def encode_with_args(args):
    encode(input_file = args.dataset,
           output_file = args.outputfile,
           log_dir = args.logdir)
import numpy as np

from .network import autoencoder, get_encoder
from .io import read_records, load_model


def encode(input_file, output_file, log_dir):
    X = read_records(input_file)
    size = X.shape[1]

    model = load_model(log_dir)

    assert model.input_shape[1] == size, \
        'Input size of data and pretrained model must be same'

    encoder = get_encoder(model)
    predictions = encoder.predict(X)
    np.savetxt(output_file, predictions)


def encode_with_args(args):
    encode(input_file = args.dataset,
           output_file = args.outputfile,
           log_dir = args.logdir)
Check input dimensions of pretrained model and input file
Check input dimensions of pretrained model and input file
Python
apache-2.0
theislab/dca,theislab/dca,theislab/dca
68d7b3995c49abd8f7096f9498bdbddf6b696d81
back_office/models.py
back_office/models.py
from django.db import models
from django.utils.translation import ugettext as _
from Django.contrib.auth.models import User

FEMALE = 'F'
MALE = 'M'


class Teacher(models.Model):
    """
    halaqat teachers informations
    """
    GENDET_CHOICES = (
        (MALE, _('Male')),
        (FEMALE, _('Female')),
    )
    name = models.CharField(max_length=100, verbose_name=_('Name'))
    gender = models.CharField(max_length=1, verbose_name=_('Gender'),
                              choices=GENDET_CHOICES)
    civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID'))
    phone_number = models.CharField(max_length=15,
                                    verbose_name=_('Phone Number'))
    job_title = models.CharField(max_length=15, verbose_name=_('Title'))
    user = models.OneToOneField(to=User, related_name='teachers')
from django.db import models
from django.utils.translation import ugettext as _
from Django.contrib.auth.models import User

FEMALE = 'F'
MALE = 'M'


class Teacher(models.Model):
    """
    halaqat teachers informations
    """
    GENDET_CHOICES = (
        (MALE, _('Male')),
        (FEMALE, _('Female')),
    )
    name = models.CharField(max_length=100, verbose_name=_('Name'))
    gender = models.CharField(max_length=1, verbose_name=_('Gender'),
                              choices=GENDET_CHOICES)
    civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID'))
    phone_number = models.CharField(max_length=15,
                                    verbose_name=_('Phone Number'))
    job_title = models.CharField(max_length=15, verbose_name=_('Title'))
    enabled = models.BooleanField(default=True)
    user = models.OneToOneField(to=User, related_name='teachers')

    def enable(self):
        """
        Enable teacher profile
        :return:
        """
        self.enabled = True
        self.save()

    def disable(self):
        """
        Disable teacher profile
        :return:
        """
        self.enabled = False
        self.save()
Add enabled field to teacher model
Add enabled field to teacher model
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
7bdd06f568856c010a4eacb1e70c262fa4c3388c
bin/trigger_upload.py
bin/trigger_upload.py
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
Useful for manually triggering Fedimg jobs.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 2:
    print 'Usage: trigger_upload.py <rawxz_image_url>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]

fedimg.uploader.upload(upload_pool, [url])
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 3:
    print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
compose_id = sys.argv[2]

compose_meta = {
    'compose_id': compose_id
}

fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
Fix the manual upload trigger script
scripts: Fix the manual upload trigger script

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
Python
agpl-3.0
fedora-infra/fedimg,fedora-infra/fedimg
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL. Useful for
manually triggering Fedimg jobs.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 2:
    print 'Usage: trigger_upload.py <rawxz_image_url>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
fedimg.uploader.upload(upload_pool, [url])
scripts: Fix the manual upload trigger script

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 3:
    print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
compose_id = sys.argv[2]

compose_meta = {
    'compose_id': compose_id
}

fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
<commit_before>#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL. Useful for
manually triggering Fedimg jobs.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 2:
    print 'Usage: trigger_upload.py <rawxz_image_url>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
fedimg.uploader.upload(upload_pool, [url])
<commit_msg>scripts: Fix the manual upload trigger script

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com><commit_after>
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 3:
    print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
compose_id = sys.argv[2]

compose_meta = {
    'compose_id': compose_id
}

fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL. Useful for
manually triggering Fedimg jobs.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 2:
    print 'Usage: trigger_upload.py <rawxz_image_url>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
fedimg.uploader.upload(upload_pool, [url])
scripts: Fix the manual upload trigger script

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 3:
    print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
compose_id = sys.argv[2]

compose_meta = {
    'compose_id': compose_id
}

fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
<commit_before>#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL. Useful for
manually triggering Fedimg jobs.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 2:
    print 'Usage: trigger_upload.py <rawxz_image_url>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
fedimg.uploader.upload(upload_pool, [url])
<commit_msg>scripts: Fix the manual upload trigger script

Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com><commit_after>#!/bin/env python
# -*- coding: utf8 -*-

"""
Triggers an upload process with the specified raw.xz URL.
"""

import logging
import logging.config
import multiprocessing.pool
import sys

import fedmsg
import fedmsg.config

import fedimg
import fedimg.services
from fedimg.services.ec2 import EC2Service, EC2ServiceException
import fedimg.uploader
from fedimg.util import virt_types_from_url

if len(sys.argv) != 3:
    print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>'
    sys.exit(1)

logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')

upload_pool = multiprocessing.pool.ThreadPool(processes=4)
url = sys.argv[1]
compose_id = sys.argv[2]

compose_meta = {
    'compose_id': compose_id
}

fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
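A hedged sketch of how the reworked script is now invoked, since it grew a second positional argument; the URL, compose id, and interpreter name below are illustrative assumptions, not values from the commit:

# Illustration only: the raw.xz URL and compose id are invented, and the
# script's print-statement syntax implies a Python 2 interpreter.
import subprocess

subprocess.check_call([
    'python2', 'bin/trigger_upload.py',
    'https://kojipkgs.example.org/compose/Fedora-Rawhide-20170101.n.0.raw.xz',
    'Fedora-Rawhide-20170101.n.0',
])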
b503a6e893d71b96b3737e567dde16f110db5fc7
src/prepare_turk_batch.py
src/prepare_turk_batch.py
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for fname in os.listdir(args.input):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for i, fname in enumerate(os.listdir(args.input)):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            for j, (prompt, time_range) in enumerate(doc["prompts"]):
                doc["id"] = "doc-{}-{}".format(i, j)
                doc["prompt"] = prompt
                doc["recommendedMinWordCount"] = time_range[0]
                doc["recommendedMaxWordCount"] = time_range[1]
                writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
Prepare data with the new fields and prompts
Prepare data with the new fields and prompts
Python
mit
arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for fname in os.listdir(args.input):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
Prepare data with the new fields and prompts
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for i, fname in enumerate(os.listdir(args.input)):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            for j, (prompt, time_range) in enumerate(doc["prompts"]):
                doc["id"] = "doc-{}-{}".format(i, j)
                doc["prompt"] = prompt
                doc["recommendedMinWordCount"] = time_range[0]
                doc["recommendedMaxWordCount"] = time_range[1]
                writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for fname in os.listdir(args.input):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
<commit_msg>Prepare data with the new fields and prompts<commit_after>
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for i, fname in enumerate(os.listdir(args.input)):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            for j, (prompt, time_range) in enumerate(doc["prompts"]):
                doc["id"] = "doc-{}-{}".format(i, j)
                doc["prompt"] = prompt
                doc["recommendedMinWordCount"] = time_range[0]
                doc["recommendedMaxWordCount"] = time_range[1]
                writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for fname in os.listdir(args.input):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
Prepare data with the new fields and prompts
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for i, fname in enumerate(os.listdir(args.input)):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            for j, (prompt, time_range) in enumerate(doc["prompts"]):
                doc["id"] = "doc-{}-{}".format(i, j)
                doc["prompt"] = prompt
                doc["recommendedMinWordCount"] = time_range[0]
                doc["recommendedMaxWordCount"] = time_range[1]
                writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for fname in os.listdir(args.input):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
<commit_msg>Prepare data with the new fields and prompts<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""

"""
import os
import sys
import csv
import json
import html

def do_command(args):
    assert os.path.exists(args.input)

    writer = csv.writer(args.output)
    writer.writerow(["document"])

    for i, fname in enumerate(os.listdir(args.input)):
        if not fname.endswith('.json'):
            continue
        with open(os.path.join(args.input, fname)) as f:
            doc = json.load(f)
            for j, (prompt, time_range) in enumerate(doc["prompts"]):
                doc["id"] = "doc-{}-{}".format(i, j)
                doc["prompt"] = prompt
                doc["recommendedMinWordCount"] = time_range[0]
                doc["recommendedMaxWordCount"] = time_range[1]
                writer.writerow([html.escape(json.dumps(doc))])

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files")
    parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk")
    parser.set_defaults(func=do_command)

    #subparsers = parser.add_subparsers()
    #command_parser = subparsers.add_parser('command', help='' )
    #command_parser.set_defaults(func=do_command)

    ARGS = parser.parse_args()
    if ARGS.func is None:
        parser.print_help()
        sys.exit(1)
    else:
        ARGS.func(ARGS)
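A self-contained sketch of the fan-out the new inner loop performs: one input document with two prompts now yields two CSV rows, each tagged with a per-prompt id and recommended word-count bounds. The sample document below is invented for illustration:

import csv
import html
import json
import sys

doc = {"prompts": [["Summarize the article.", [50, 120]],
                   ["List the key actors.", [20, 60]]]}  # invented sample

writer = csv.writer(sys.stdout)
writer.writerow(["document"])
for j, (prompt, time_range) in enumerate(doc["prompts"]):
    doc["id"] = "doc-0-{}".format(j)        # i fixed at 0 for a single file
    doc["prompt"] = prompt
    doc["recommendedMinWordCount"] = time_range[0]
    doc["recommendedMaxWordCount"] = time_range[1]
    writer.writerow([html.escape(json.dumps(doc))])  # one row per prompt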
dfb79b9f148663617048a3c2a310b2a66a1c7103
marxbot.py
marxbot.py
from errbot import BotPlugin, botcmd, webhook

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    @botcmd(split_args_with=None)
    def marx(self, message, args):
        return "what a guy"
from errbot import BotPlugin, botcmd, webhook
import pytumblr

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    tumblr_client = None

    def activate(self):
        super().activate()

        if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth consumer key/secret")
            return
        if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)")
            return

        self.tumblr_client = pytumblr.TumblrRestClient(
            self.config["consumer_key"], self.config["consumer_secret"],
            self.config["oauth_token"], self.config["oauth_token_secret"])

    def get_configuration_template(self):
        return {"consumer_key": "", "consumer_secret": "",
                "oauth_token": "", "oauth_token_secret": ""}

    @botcmd
    def marx(self, message, args):
        if self.tumblr_client is None:
            return "MarxBot must be configured and restarted to serve quotes."

        post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0]
        self['latest_post'] = post
        return str(post['text'])
Use the Tumblr API to get Marx quotes
Use the Tumblr API to get Marx quotes
Python
mit
AbigailBuccaneer/err-dailymarx
from errbot import BotPlugin, botcmd, webhook

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    @botcmd(split_args_with=None)
    def marx(self, message, args):
        return "what a guy"
Use the Tumblr API to get Marx quotes
from errbot import BotPlugin, botcmd, webhook
import pytumblr

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    tumblr_client = None

    def activate(self):
        super().activate()

        if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth consumer key/secret")
            return
        if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)")
            return

        self.tumblr_client = pytumblr.TumblrRestClient(
            self.config["consumer_key"], self.config["consumer_secret"],
            self.config["oauth_token"], self.config["oauth_token_secret"])

    def get_configuration_template(self):
        return {"consumer_key": "", "consumer_secret": "",
                "oauth_token": "", "oauth_token_secret": ""}

    @botcmd
    def marx(self, message, args):
        if self.tumblr_client is None:
            return "MarxBot must be configured and restarted to serve quotes."

        post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0]
        self['latest_post'] = post
        return str(post['text'])
<commit_before>from errbot import BotPlugin, botcmd, webhook

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    @botcmd(split_args_with=None)
    def marx(self, message, args):
        return "what a guy"
<commit_msg>Use the Tumblr API to get Marx quotes<commit_after>
from errbot import BotPlugin, botcmd, webhook
import pytumblr

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    tumblr_client = None

    def activate(self):
        super().activate()

        if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth consumer key/secret")
            return
        if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)")
            return

        self.tumblr_client = pytumblr.TumblrRestClient(
            self.config["consumer_key"], self.config["consumer_secret"],
            self.config["oauth_token"], self.config["oauth_token_secret"])

    def get_configuration_template(self):
        return {"consumer_key": "", "consumer_secret": "",
                "oauth_token": "", "oauth_token_secret": ""}

    @botcmd
    def marx(self, message, args):
        if self.tumblr_client is None:
            return "MarxBot must be configured and restarted to serve quotes."

        post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0]
        self['latest_post'] = post
        return str(post['text'])
from errbot import BotPlugin, botcmd, webhook

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    @botcmd(split_args_with=None)
    def marx(self, message, args):
        return "what a guy"
Use the Tumblr API to get Marx quotes
from errbot import BotPlugin, botcmd, webhook
import pytumblr

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    tumblr_client = None

    def activate(self):
        super().activate()

        if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth consumer key/secret")
            return
        if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)")
            return

        self.tumblr_client = pytumblr.TumblrRestClient(
            self.config["consumer_key"], self.config["consumer_secret"],
            self.config["oauth_token"], self.config["oauth_token_secret"])

    def get_configuration_template(self):
        return {"consumer_key": "", "consumer_secret": "",
                "oauth_token": "", "oauth_token_secret": ""}

    @botcmd
    def marx(self, message, args):
        if self.tumblr_client is None:
            return "MarxBot must be configured and restarted to serve quotes."

        post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0]
        self['latest_post'] = post
        return str(post['text'])
<commit_before>from errbot import BotPlugin, botcmd, webhook

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    @botcmd(split_args_with=None)
    def marx(self, message, args):
        return "what a guy"
<commit_msg>Use the Tumblr API to get Marx quotes<commit_after>from errbot import BotPlugin, botcmd, webhook
import pytumblr

class MarxBot(BotPlugin):
    """Your daily dose of Marx"""
    min_err_version = '1.6.0'

    tumblr_client = None

    def activate(self):
        super().activate()

        if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth consumer key/secret")
            return
        if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "":
            self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)")
            return

        self.tumblr_client = pytumblr.TumblrRestClient(
            self.config["consumer_key"], self.config["consumer_secret"],
            self.config["oauth_token"], self.config["oauth_token_secret"])

    def get_configuration_template(self):
        return {"consumer_key": "", "consumer_secret": "",
                "oauth_token": "", "oauth_token_secret": ""}

    @botcmd
    def marx(self, message, args):
        if self.tumblr_client is None:
            return "MarxBot must be configured and restarted to serve quotes."

        post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0]
        self['latest_post'] = post
        return str(post['text'])
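A standalone sketch of the pytumblr call the plugin now relies on; the credentials below are placeholders, and note that the 'text' field is only present on Tumblr text posts:

import pytumblr

client = pytumblr.TumblrRestClient('CONSUMER_KEY', 'CONSUMER_SECRET',
                                   'OAUTH_TOKEN', 'OAUTH_TOKEN_SECRET')
response = client.posts('dailymarx', limit=1)  # returns a dict with 'posts'
post = response['posts'][0]
print(post.get('text'))  # the quote body on text posts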
19354bd82a89383d795cdada8d6af78e8f12eed8
src/server/test_client.py
src/server/test_client.py
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    caller.setData("test", "success")
    print(caller.getData("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    print(caller.SharedClientDataStore__set("test", "success"))
    print(caller.SharedClientDataStore__get("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
Update call method in test client
Update call method in test client
Python
mit
cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    caller.setData("test", "success")
    print(caller.getData("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
Update call method in test client
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    print(caller.SharedClientDataStore__set("test", "success"))
    print(caller.SharedClientDataStore__get("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
<commit_before>#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    caller.setData("test", "success")
    print(caller.getData("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
<commit_msg>Update call method in test client<commit_after>
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    print(caller.SharedClientDataStore__set("test", "success"))
    print(caller.SharedClientDataStore__get("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    caller.setData("test", "success")
    print(caller.getData("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
Update call method in test client
#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    print(caller.SharedClientDataStore__set("test", "success"))
    print(caller.SharedClientDataStore__get("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
<commit_before>#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    caller.setData("test", "success")
    print(caller.getData("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
<commit_msg>Update call method in test client<commit_after>#!/usr/bin/env python

# Echo client program
import socket
import sys

from RemoteFunctionCaller import *
from SocketNetworker import SocketNetworker

HOST = 'localhost'    # The remote host
PORT = 8553           # The same port as used by the server

s = None
for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM):
    af, socktype, proto, canonname, sa = res
    try:
        s = socket.socket(af, socktype, proto)
    except OSError as msg:
        s = None
        continue
    try:
        s.connect(sa)
    except OSError as msg:
        s.close()
        s = None
        continue
    break

if s is None:
    print('could not open socket')
    sys.exit(1)

nw = SocketNetworker(s)
caller = RemoteFunctionCaller(nw)

try:
    print(caller.SharedClientDataStore__set("test", "success"))
    print(caller.SharedClientDataStore__get("test", default="failish"))
except TimeoutError:
    print("Timed out.")

nw.close()
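A toy illustration of the ClassName__method naming convention the updated client uses; this is an assumption about how RemoteFunctionCaller might split attribute names for dispatch, not the project's actual implementation:

class ToyCaller(object):
    """Hypothetical stand-in for RemoteFunctionCaller's attribute dispatch."""
    def __getattr__(self, name):
        cls_name, _, method = name.partition('__')
        def call(*args, **kwargs):
            # A real caller would serialize this request over the networker.
            print("would dispatch {}.{} args={} kwargs={}".format(
                cls_name, method, args, kwargs))
        return call

ToyCaller().SharedClientDataStore__set("test", "success")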