commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3166d48bed0bd7663a3332736a236d8f424b6cd3 | room/views.py | room/views.py | from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
| from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
from hub.models import Hub
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
hub_token = request.DATA['hub_token']
except:
raise ParseError(detail='hub_token must be specified')
try:
hub = Hub.objects.get(token=hub_token)
except Hub.DoesNotExist:
raise PermissionDenied()
if not hub.room_permissions.filter(id=room.id).exists():
raise PermissionDenied()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
| Check hub permission @ report_availability | Check hub permission @ report_availability
| Python | mit | iver56/useat-api | from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
Check hub permission @ report_availability | from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
from hub.models import Hub
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
hub_token = request.DATA['hub_token']
except:
raise ParseError(detail='hub_token must be specified')
try:
hub = Hub.objects.get(token=hub_token)
except Hub.DoesNotExist:
raise PermissionDenied()
if not hub.room_permissions.filter(id=room.id).exists():
raise PermissionDenied()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
| <commit_before>from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
<commit_msg>Check hub permission @ report_availability<commit_after> | from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
from hub.models import Hub
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
hub_token = request.DATA['hub_token']
except:
raise ParseError(detail='hub_token must be specified')
try:
hub = Hub.objects.get(token=hub_token)
except Hub.DoesNotExist:
raise PermissionDenied()
if not hub.room_permissions.filter(id=room.id).exists():
raise PermissionDenied()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
| from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
Check hub permission @ report_availabilityfrom .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
from hub.models import Hub
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
hub_token = request.DATA['hub_token']
except:
raise ParseError(detail='hub_token must be specified')
try:
hub = Hub.objects.get(token=hub_token)
except Hub.DoesNotExist:
raise PermissionDenied()
if not hub.room_permissions.filter(id=room.id).exists():
raise PermissionDenied()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
| <commit_before>from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
<commit_msg>Check hub permission @ report_availability<commit_after>from .models import Room
from rest_framework import viewsets
from .serializers import RoomDetailSerializer, RoomListSerializer
from rest_framework.decorators import detail_route
from django.core.exceptions import PermissionDenied
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from django.utils import timezone
from hub.models import Hub
class RoomViewSet(viewsets.ModelViewSet):
queryset = Room.objects.all()
def get_serializer_class(self):
if self.action == 'retrieve':
return RoomDetailSerializer
return RoomListSerializer
@detail_route(methods=['post'])
def report_availability(self, request, pk=None):
room = self.get_object()
try:
hub_token = request.DATA['hub_token']
except:
raise ParseError(detail='hub_token must be specified')
try:
hub = Hub.objects.get(token=hub_token)
except Hub.DoesNotExist:
raise PermissionDenied()
if not hub.room_permissions.filter(id=room.id).exists():
raise PermissionDenied()
try:
is_available = int(request.DATA['is_available'])
except:
raise ParseError(detail="Must specify is_available that is an integer")
if is_available < 0:
raise ParseError(detail="is_available must be non-negative")
if is_available:
room.available_since = timezone.now()
else:
room.available_since = None
room.save()
return Response({'status': 'ok'})
|
6df54729954d5f9bd25549c482b3bc170a338ff0 | tests/completion.py | tests/completion.py | import sys
from nose.tools import ok_
from _utils import _output_eq, IntegrationSpec, _dispatch, trap, expect_exit
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
ok_(flag in output)
| import sys
from nose.tools import ok_
from _utils import (
_output_eq, IntegrationSpec, _dispatch, trap, expect_exit, assert_contains
)
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
assert_contains(output, "{0}\n".format(flag))
| Make new test use contains assertion | Make new test use contains assertion
| Python | bsd-2-clause | mattrobenolt/invoke,frol/invoke,pfmoore/invoke,singingwolfboy/invoke,pyinvoke/invoke,kejbaly2/invoke,tyewang/invoke,pyinvoke/invoke,mkusz/invoke,frol/invoke,pfmoore/invoke,mkusz/invoke,mattrobenolt/invoke,kejbaly2/invoke | import sys
from nose.tools import ok_
from _utils import _output_eq, IntegrationSpec, _dispatch, trap, expect_exit
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
ok_(flag in output)
Make new test use contains assertion | import sys
from nose.tools import ok_
from _utils import (
_output_eq, IntegrationSpec, _dispatch, trap, expect_exit, assert_contains
)
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
assert_contains(output, "{0}\n".format(flag))
| <commit_before>import sys
from nose.tools import ok_
from _utils import _output_eq, IntegrationSpec, _dispatch, trap, expect_exit
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
ok_(flag in output)
<commit_msg>Make new test use contains assertion<commit_after> | import sys
from nose.tools import ok_
from _utils import (
_output_eq, IntegrationSpec, _dispatch, trap, expect_exit, assert_contains
)
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
assert_contains(output, "{0}\n".format(flag))
| import sys
from nose.tools import ok_
from _utils import _output_eq, IntegrationSpec, _dispatch, trap, expect_exit
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
ok_(flag in output)
Make new test use contains assertionimport sys
from nose.tools import ok_
from _utils import (
_output_eq, IntegrationSpec, _dispatch, trap, expect_exit, assert_contains
)
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
assert_contains(output, "{0}\n".format(flag))
| <commit_before>import sys
from nose.tools import ok_
from _utils import _output_eq, IntegrationSpec, _dispatch, trap, expect_exit
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
ok_(flag in output)
<commit_msg>Make new test use contains assertion<commit_after>import sys
from nose.tools import ok_
from _utils import (
_output_eq, IntegrationSpec, _dispatch, trap, expect_exit, assert_contains
)
class ShellCompletion(IntegrationSpec):
"""
Shell tab-completion behavior
"""
def no_input_means_just_task_names(self):
_output_eq('-c simple_ns_list --complete', "z_toplevel\na.b.subtask\n")
def no_input_with_no_tasks_yields_empty_response(self):
_output_eq('-c empty --complete', "")
@trap
def top_level_with_dash_means_core_options(self):
with expect_exit(0):
_dispatch('inv --complete -- -')
output = sys.stdout.getvalue()
# No point mirroring all core options, just spot check a few
for flag in ('--no-dedupe', '-d', '--debug', '-V', '--version'):
assert_contains(output, "{0}\n".format(flag))
|
71fd42a92b41529d9f5c784840ab4c190946adef | social_auth/backends/pipeline/associate.py | social_auth/backends/pipeline/associate.py | from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
| from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
| Remove spammy warning which doesn't apply when stores check emails | Remove spammy warning which doesn't apply when stores check emails
| Python | bsd-3-clause | antoviaque/django-social-auth-norel | from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
Remove spammy warning which doesn't apply when stores check emails | from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
| <commit_before>from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
<commit_msg>Remove spammy warning which doesn't apply when stores check emails<commit_after> | from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
| from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
Remove spammy warning which doesn't apply when stores check emailsfrom django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
| <commit_before>from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
<commit_msg>Remove spammy warning which doesn't apply when stores check emails<commit_after>from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist
from social_auth.utils import setting
from social_auth.models import UserSocialAuth
from social_auth.backends.pipeline import warn_setting
from social_auth.backends.exceptions import AuthException
def associate_by_email(details, user=None, *args, **kwargs):
"""Return user entry with same email address as one returned on details."""
if user:
return None
email = details.get('email')
# Don't spam with a warning, this doesn't apply when providers check emails
#warn_setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', 'associate_by_email')
if email and setting('SOCIAL_AUTH_ASSOCIATE_BY_MAIL', False):
# try to associate accounts registered with the same email address,
# only if it's a single object. AuthException is raised if multiple
# objects are returned
try:
return {'user': UserSocialAuth.get_user_by_email(email=email)}
except MultipleObjectsReturned:
raise AuthException(kwargs['backend'], 'Not unique email address.')
except ObjectDoesNotExist:
pass
|
7e4c38c482bc908e85d8121c8e9d70b07ad9cd0d | salt/returners/mongo_return.py | salt/returners/mongo_return.py | '''
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsytem
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
| '''
Return data to a mongodb server
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
| Remove reference to butter since butter is deprecated | Remove reference to butter since butter is deprecated
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | '''
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsytem
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
Remove reference to butter since butter is deprecated | '''
Return data to a mongodb server
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
| <commit_before>'''
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsytem
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
<commit_msg>Remove reference to butter since butter is deprecated<commit_after> | '''
Return data to a mongodb server
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
| '''
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsytem
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
Remove reference to butter since butter is deprecated'''
Return data to a mongodb server
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
| <commit_before>'''
Return data to a mongodb server
This is the default interface for returning data for the butter statd subsytem
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
<commit_msg>Remove reference to butter since butter is deprecated<commit_after>'''
Return data to a mongodb server
Required python modules: pymongo
'''
import logging
try:
import pymongo
has_pymongo = True
except ImportError:
has_pymongo = False
log = logging.getLogger(__name__)
__opts__ = {'mongo.db': 'salt',
'mongo.host': 'salt',
'mongo.password': '',
'mongo.port': 27017,
'mongo.user': ''}
def __virtual__():
if not has_pymongo:
return False
return 'mongo_return'
def returner(ret):
'''
Return data to a mongodb server
'''
conn = pymongo.Connection(__opts__['mongo.host'],
__opts__['mongo.port'])
db = conn[__opts__['mongo.db']]
user = __opts__.get('mongo.user')
password = __opts__.get('mongo.password')
if user and password:
db.authenticate(user, password)
col = db[ret['id']]
back = {}
if isinstance(ret['return'], dict):
for key in ret['return']:
back[key.replace('.', '-')] = ret['return'][key]
else:
back = ret['return']
log.debug(back)
col.insert({ret['jid']: back})
|
6811b04a0a2e311fc2d014cd601da61a2f17a451 | sell/views.py | sell/views.py | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
    """Entry point of the sell wizard: redirect to the first step."""
    return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
    """
    Display and process the "provide personal data" step of the sell wizard.

    POST: validate the submitted form and, if valid, stash the (unsaved)
    student data in the session as a plain dict, then advance to the book
    selection step. GET: show the form, pre-filled from the session when
    the user returns to this step.
    """
    if request.method == 'POST':
        form = PersonalDataForm(request.POST)
        if form.is_valid():
            # commit=False: the Student row is only persisted at the end of
            # the wizard; until then it lives in the session.
            request.session['personal_data'] = model_to_dict(form.save(commit=False))
            return HttpResponseRedirect(reverse('sell.views.books'))
    else:
        # Membership test instead of direct indexing: request.session[...]
        # raises KeyError on the user's first visit, before any data exists.
        if 'personal_data' in request.session:
            form = PersonalDataForm(instance=Student(**request.session['personal_data']))
        else:
            form = PersonalDataForm()
    return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!") | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
    """
    Display and process the "provide personal data" step of the sell wizard.

    POST: validate the submitted form and, if valid, store the (unsaved)
    student record in the session and redirect to the book selection step.
    GET: show the form, pre-filled from the session when available.
    """
    if request.method == 'POST':
        form = PersonalDataForm(request.POST)
        if form.is_valid():
            # commit=False: the Student row is only persisted at the end of
            # the wizard; until then it lives in the session as a plain dict.
            request.session['personal_data'] = model_to_dict(form.save(commit=False))
            return HttpResponseRedirect(reverse('sell.views.books'))
    else:
        # Membership test avoids a KeyError on the user's first visit.
        if 'personal_data' in request.session:
            form = PersonalDataForm(instance=Student(**request.session['personal_data']))
        else:
            form = PersonalDataForm()
    return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
    """Book selection step of the sell wizard (placeholder implementation)."""
    return HttpResponse("Hello, world!")
| Python | agpl-3.0 | m4tx/egielda,m4tx/egielda,m4tx/egielda | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if request.session['personal_data']:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!")Fix KeyError in Provide personal data form | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if 'personal_data' in request.session:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!") | <commit_before>from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if request.session['personal_data']:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!")<commit_msg>Fix KeyError in Provide personal data form<commit_after> | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if 'personal_data' in request.session:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!") | from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if request.session['personal_data']:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!")Fix KeyError in Provide personal data formfrom django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if 'personal_data' in request.session:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!") | <commit_before>from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if request.session['personal_data']:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!")<commit_msg>Fix KeyError in Provide personal data form<commit_after>from django.core.urlresolvers import reverse
from django.forms import model_to_dict
from django.http.response import HttpResponseRedirect, HttpResponse
from django.shortcuts import render
from common.models import Student
from sell.forms import PersonalDataForm
def index(request):
return HttpResponseRedirect(reverse('sell.views.personal_data'))
def personal_data(request):
if request.method == 'POST':
form = PersonalDataForm(request.POST)
if form.is_valid():
request.session['personal_data'] = model_to_dict(form.save(commit=False))
return HttpResponseRedirect(reverse('sell.views.books'))
else:
if 'personal_data' in request.session:
form = PersonalDataForm(instance=Student(**request.session['personal_data']))
else:
form = PersonalDataForm()
return render(request, 'sell/personal_data.html', {'form': form})
def books(request):
return HttpResponse("Hello, world!") |
7e76ced5a75a1d89be384fb4d748c3c0599bfaea | bugimporters/items.py | bugimporters/items.py | import scrapy.item
class ParsedBug(scrapy.item.Item):
    """One bug as parsed from a remote bug tracker.

    Fields beginning with an underscore are not really part of a bug, but
    extra information that can be exported alongside it.
    """
    _project_name = scrapy.item.Field()
    _tracker_name = scrapy.item.Field()
    # Flag set when the bug was deleted on the remote tracker, so consumers
    # can drop it directly instead of relying on a deletion callback.
    _deleted = scrapy.item.Field()

    # These fields correspond to bug data
    title = scrapy.item.Field()
    description = scrapy.item.Field()
    status = scrapy.item.Field()
    importance = scrapy.item.Field()
    people_involved = scrapy.item.Field()
    date_reported = scrapy.item.Field()
    last_touched = scrapy.item.Field()
    submitter_username = scrapy.item.Field()
    submitter_realname = scrapy.item.Field()
    canonical_bug_link = scrapy.item.Field()
    looks_closed = scrapy.item.Field()
    last_polled = scrapy.item.Field()
    as_appears_in_distribution = scrapy.item.Field()
    good_for_newcomers = scrapy.item.Field()
    concerns_just_documentation = scrapy.item.Field()
| import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
_deleted = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
| Create _deleted field in ParsedBug | Create _deleted field in ParsedBug
We are migrating toward using a special ParsedBug flag to
indicate a bug is remotely deleted, rather than calling a
callback.
| Python | agpl-3.0 | openhatch/oh-bugimporters,openhatch/oh-bugimporters,openhatch/oh-bugimporters | import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
Create _deleted field in ParsedBug
We are migrating toward using a special ParsedBug flag to
indicate a bug is remotely deleted, rather than calling a
callback. | import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
_deleted = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
| <commit_before>import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
<commit_msg>Create _deleted field in ParsedBug
We are migrating toward using a special ParsedBug flag to
indicate a bug is remotely deleted, rather than calling a
callback.<commit_after> | import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
_deleted = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
| import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
Create _deleted field in ParsedBug
We are migrating toward using a special ParsedBug flag to
indicate a bug is remotely deleted, rather than calling a
callback.import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
_deleted = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
| <commit_before>import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
<commit_msg>Create _deleted field in ParsedBug
We are migrating toward using a special ParsedBug flag to
indicate a bug is remotely deleted, rather than calling a
callback.<commit_after>import scrapy.item
class ParsedBug(scrapy.item.Item):
# Fields beginning with an underscore are not really part of a
# bug, but extra information that can be exported.
_project_name = scrapy.item.Field()
_tracker_name = scrapy.item.Field()
_deleted = scrapy.item.Field()
# These fields correspond to bug data
title = scrapy.item.Field()
description = scrapy.item.Field()
status = scrapy.item.Field()
importance = scrapy.item.Field()
people_involved = scrapy.item.Field()
date_reported = scrapy.item.Field()
last_touched = scrapy.item.Field()
submitter_username = scrapy.item.Field()
submitter_realname = scrapy.item.Field()
canonical_bug_link = scrapy.item.Field()
looks_closed = scrapy.item.Field()
last_polled = scrapy.item.Field()
as_appears_in_distribution = scrapy.item.Field()
good_for_newcomers = scrapy.item.Field()
concerns_just_documentation = scrapy.item.Field()
|
9a5d2a6f9efefb5b1647de5e467a9dfb74b86c9b | buffpy/tests/test_link.py | buffpy/tests/test_link.py | from nose.tools import eq_
from mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
'''
Test link's shares retrieving from constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
mocked_api.get.assert_called_once_with(url='links/shares.json?url=www.google.com')
def test_links_get_shares():
'''
Test link's shares retrieving method
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
eq_(link.get_shares(), 123)
mocked_api.get.assert_any_call(url='links/shares.json?url=www.google.com')
eq_(mocked_api.get.call_count, 2)
| from unittest.mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
""" Test link"s shares retrieving from constructor. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
mocked_api.get.assert_called_once_with(url="links/shares.json?url=www.google.com")
def test_links_get_shares():
""" Test link"s shares retrieving method. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
assert link.get_shares() == 123
mocked_api.get.assert_any_call(url="links/shares.json?url=www.google.com")
assert mocked_api.get.call_count == 2
| Migrate link tests to pytest | Migrate link tests to pytest
| Python | mit | vtemian/buffpy | from nose.tools import eq_
from mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
'''
Test link's shares retrieving from constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
mocked_api.get.assert_called_once_with(url='links/shares.json?url=www.google.com')
def test_links_get_shares():
'''
Test link's shares retrieving method
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
eq_(link.get_shares(), 123)
mocked_api.get.assert_any_call(url='links/shares.json?url=www.google.com')
eq_(mocked_api.get.call_count, 2)
Migrate link tests to pytest | from unittest.mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
""" Test link"s shares retrieving from constructor. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
mocked_api.get.assert_called_once_with(url="links/shares.json?url=www.google.com")
def test_links_get_shares():
""" Test link"s shares retrieving method. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
assert link.get_shares() == 123
mocked_api.get.assert_any_call(url="links/shares.json?url=www.google.com")
assert mocked_api.get.call_count == 2
| <commit_before>from nose.tools import eq_
from mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
'''
Test link's shares retrieving from constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
mocked_api.get.assert_called_once_with(url='links/shares.json?url=www.google.com')
def test_links_get_shares():
'''
Test link's shares retrieving method
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
eq_(link.get_shares(), 123)
mocked_api.get.assert_any_call(url='links/shares.json?url=www.google.com')
eq_(mocked_api.get.call_count, 2)
<commit_msg>Migrate link tests to pytest<commit_after> | from unittest.mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
    """The constructor should fetch the share count and expose it on the link."""
    api = MagicMock()
    api.get.return_value = {"shares": 123}

    link = Link(api=api, url="www.google.com")

    assert link["shares"] == 123
    assert link["url"] == "www.google.com"
    api.get.assert_called_once_with(url="links/shares.json?url=www.google.com")
def test_links_get_shares():
    """get_shares() should re-query the API and return the share count."""
    api = MagicMock()
    api.get.return_value = {"shares": 123}

    link = Link(api=api, url="www.google.com")

    assert link["shares"] == 123
    assert link["url"] == "www.google.com"
    assert link.get_shares() == 123
    # Constructor plus the explicit get_shares() call: two API round-trips.
    api.get.assert_any_call(url="links/shares.json?url=www.google.com")
    assert api.get.call_count == 2
| from nose.tools import eq_
from mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
'''
Test link's shares retrieving from constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
mocked_api.get.assert_called_once_with(url='links/shares.json?url=www.google.com')
def test_links_get_shares():
'''
Test link's shares retrieving method
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
eq_(link.get_shares(), 123)
mocked_api.get.assert_any_call(url='links/shares.json?url=www.google.com')
eq_(mocked_api.get.call_count, 2)
Migrate link tests to pytestfrom unittest.mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
""" Test link"s shares retrieving from constructor. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
mocked_api.get.assert_called_once_with(url="links/shares.json?url=www.google.com")
def test_links_get_shares():
""" Test link"s shares retrieving method. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
assert link.get_shares() == 123
mocked_api.get.assert_any_call(url="links/shares.json?url=www.google.com")
assert mocked_api.get.call_count == 2
| <commit_before>from nose.tools import eq_
from mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
'''
Test link's shares retrieving from constructor
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
mocked_api.get.assert_called_once_with(url='links/shares.json?url=www.google.com')
def test_links_get_shares():
'''
Test link's shares retrieving method
'''
mocked_api = MagicMock()
mocked_api.get.return_value = {'shares': 123}
link = Link(api=mocked_api, url='www.google.com')
eq_(link, {'shares': 123, 'url': 'www.google.com', 'api': mocked_api})
eq_(link.get_shares(), 123)
mocked_api.get.assert_any_call(url='links/shares.json?url=www.google.com')
eq_(mocked_api.get.call_count, 2)
<commit_msg>Migrate link tests to pytest<commit_after>from unittest.mock import MagicMock
from buffpy.models.link import Link
def test_links_shares():
""" Test link"s shares retrieving from constructor. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
mocked_api.get.assert_called_once_with(url="links/shares.json?url=www.google.com")
def test_links_get_shares():
""" Test link"s shares retrieving method. """
mocked_api = MagicMock()
mocked_api.get.return_value = {"shares": 123}
link = Link(api=mocked_api, url="www.google.com")
assert link["shares"] == 123
assert link["url"] == "www.google.com"
assert link.get_shares() == 123
mocked_api.get.assert_any_call(url="links/shares.json?url=www.google.com")
assert mocked_api.get.call_count == 2
|
7989252dd687dfaa1fd12ed8900c947190bfe4f7 | ichnaea/cache.py | ichnaea/cache.py | import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(host=r_host,
port=r_port,
db=r_db,
connection_pool=pool)
| import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
host=r_host,
port=r_port,
db=r_db,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(connection_pool=pool)
| Set redis connection info on the right class (the pool). | Set redis connection info on the right class (the pool).
| Python | apache-2.0 | therewillbecode/ichnaea,mozilla/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea | import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(host=r_host,
port=r_port,
db=r_db,
connection_pool=pool)
Set redis connection info on the right class (the pool). | import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
host=r_host,
port=r_port,
db=r_db,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(connection_pool=pool)
| <commit_before>import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(host=r_host,
port=r_port,
db=r_db,
connection_pool=pool)
<commit_msg>Set redis connection info on the right class (the pool).<commit_after> | import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
host=r_host,
port=r_port,
db=r_db,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(connection_pool=pool)
| import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(host=r_host,
port=r_port,
db=r_db,
connection_pool=pool)
Set redis connection info on the right class (the pool).import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
host=r_host,
port=r_port,
db=r_db,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(connection_pool=pool)
| <commit_before>import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(host=r_host,
port=r_port,
db=r_db,
connection_pool=pool)
<commit_msg>Set redis connection info on the right class (the pool).<commit_after>import redis
import urlparse
def redis_client(redis_url):
r_url = urlparse.urlparse(redis_url)
r_host = r_url.netloc.split(":")[0]
r_port = int(r_url.netloc.split(":")[1])
r_db = int(r_url.path[1:])
pool = redis.ConnectionPool(
max_connections=100,
host=r_host,
port=r_port,
db=r_db,
socket_timeout=10.0,
socket_connect_timeout=30.0,
socket_keepalive=True,
)
return redis.StrictRedis(connection_pool=pool)
|
a29d712b69f64ec248fb7f6829da9996dc5b217a | tests/integration/test_with_activemq.py | tests/integration/test_with_activemq.py | from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
| import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
    '''ActiveMQ integration test case.'''
    # Broker connection overrides merged into the base fixture's config:
    # 61614 is the broker's plain (non-SSL) STOMP listener.
    CTXT = {
        'plugin.activemq.pool.1.port': 61614,
        'plugin.activemq.pool.1.password': 'marionette',
    }
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
    '''MCollective 2.0.x integration test case run against ActiveMQ.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
    '''MCollective 2.2.x integration test case run against ActiveMQ.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
    '''MCollective 2.3.x integration test case run against ActiveMQ.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
    '''MCollective 2.3.x integration test case over the broker's SSL listener.'''
    # Same broker as ActiveMQTestCase, but on 61615 (the SSL listener rather
    # than the plain 61614) with CA/key/cert fixtures from the test tree.
    CTXT = {
        'plugin.activemq.pool.1.port': 61615,
        'plugin.activemq.pool.1.password': 'marionette',
        'plugin.activemq.pool.1.ssl': 'true',
        'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
                                                      'fixtures/ca.pem'),
        'plugin.activemq.pool.1.ssl.key': os.path.join(
            ctxt.ROOT,
            'fixtures/activemq_private.pem'),
        'plugin.activemq.pool.1.ssl.cert': os.path.join(
            ctxt.ROOT,
            'fixtures/activemq_cert.pem',
        ),
    }
| Test ActiveMQ under SSL connection | Test ActiveMQ under SSL connection
| Python | bsd-3-clause | rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective | from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
Test ActiveMQ under SSL connection | import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
| <commit_before>from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
<commit_msg>Test ActiveMQ under SSL connection<commit_after> | import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
| from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
Test ActiveMQ under SSL connectionimport os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
| <commit_before>from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
<commit_msg>Test ActiveMQ under SSL connection<commit_after>import os
from pymco.test import ctxt
from . import base
class ActiveMQTestCase(base.IntegrationTestCase):
'''RabbitMQ integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
}
class TestWithActiveMQMCo20x(base.MCollective20x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo22x(base.MCollective22x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
class TestWithActiveMQSSLMCo23x(base.MCollective23x, ActiveMQTestCase):
'''MCollective integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61615,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.activemq.pool.1.ssl': 'true',
'plugin.activemq.pool.1.ssl.ca': os.path.join(ctxt.ROOT,
'fixtures/ca.pem'),
'plugin.activemq.pool.1.ssl.key': os.path.join(
ctxt.ROOT,
'fixtures/activemq_private.pem'),
'plugin.activemq.pool.1.ssl.cert': os.path.join(
ctxt.ROOT,
'fixtures/activemq_cert.pem',
),
}
|
8c0af29e7b6ec3a5e76fdb1efc56068bf276ad39 | helenae/flask_app.py | helenae/flask_app.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
import web.admin
import web.views | from flask import Flask, request, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.babelex import Babel
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
# Initialize babel
babel = Babel(app)
@babel.localeselector
def get_locale():
override = request.args.get('lang')
if override:
session['lang'] = override
return session.get('lang', 'ru')
import web.admin
import web.views | Add babel plugin for Flask | Add babel plugin for Flask
| Python | mit | Relrin/Helenae,Relrin/Helenae,Relrin/Helenae | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
import web.admin
import web.viewsAdd babel plugin for Flask | from flask import Flask, request, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.babelex import Babel
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
# Initialize babel
babel = Babel(app)
@babel.localeselector
def get_locale():
override = request.args.get('lang')
if override:
session['lang'] = override
return session.get('lang', 'ru')
import web.admin
import web.views | <commit_before>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
import web.admin
import web.views<commit_msg>Add babel plugin for Flask<commit_after> | from flask import Flask, request, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.babelex import Babel
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
# Initialize babel
babel = Babel(app)
@babel.localeselector
def get_locale():
override = request.args.get('lang')
if override:
session['lang'] = override
return session.get('lang', 'ru')
import web.admin
import web.views | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
import web.admin
import web.viewsAdd babel plugin for Flaskfrom flask import Flask, request, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.babelex import Babel
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
# Initialize babel
babel = Babel(app)
@babel.localeselector
def get_locale():
override = request.args.get('lang')
if override:
session['lang'] = override
return session.get('lang', 'ru')
import web.admin
import web.views | <commit_before>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
import web.admin
import web.views<commit_msg>Add babel plugin for Flask<commit_after>from flask import Flask, request, session
from flask_sqlalchemy import SQLAlchemy
from flask.ext.babelex import Babel
from db import tables as dbTables
app = Flask(__name__, template_folder='./web/templates/', static_folder='./web/static/', static_url_path='')
app.config['SECRET_KEY'] = 'some_secret_key'
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://user:password@localhost/csan'
db_connection = SQLAlchemy(app)
# Initialize babel
babel = Babel(app)
@babel.localeselector
def get_locale():
override = request.args.get('lang')
if override:
session['lang'] = override
return session.get('lang', 'ru')
import web.admin
import web.views |
747af88d56dc274638f515825405f58b0e59b8d7 | invoice/views.py | invoice/views.py | from django.shortcuts import get_object_or_404
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
| from django.shortcuts import get_object_or_404
from django.contrib.auth.decorators import login_required
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
    """Serve the invoice with primary key ``pk`` as a generated PDF.

    NOTE(review): unlike ``pdf_user_view`` this view enforces no
    authentication — anyone who knows/guesses a pk can fetch the invoice;
    confirm that is intended.
    """
    invoice = get_object_or_404(Invoice, pk=pk)
    filename = invoice.file_name()
    return pdf_response(draw_pdf, filename, invoice)
@login_required
def pdf_user_view(request, invoice_id):
    """Serve the logged-in user's invoice ``invoice_id`` as a PDF.

    Filtering the lookup on ``user=request.user`` makes the view 404 for
    invoices that belong to anyone else.
    """
    invoice = get_object_or_404(
        Invoice, invoice_id=invoice_id, user=request.user)
    return pdf_response(draw_pdf, invoice.file_name(), invoice)
| Make sure user in logged in for showing invoice | Make sure user in logged in for showing invoice
| Python | bsd-3-clause | Chris7/django-invoice,Chris7/django-invoice,simonluijk/django-invoice | from django.shortcuts import get_object_or_404
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
Make sure user in logged in for showing invoice | from django.shortcuts import get_object_or_404
from django.contrib.auth.decorators import login_required
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
@login_required
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
| <commit_before>from django.shortcuts import get_object_or_404
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
<commit_msg>Make sure user in logged in for showing invoice<commit_after> | from django.shortcuts import get_object_or_404
from django.contrib.auth.decorators import login_required
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
@login_required
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
| from django.shortcuts import get_object_or_404
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
Make sure user in logged in for showing invoicefrom django.shortcuts import get_object_or_404
from django.contrib.auth.decorators import login_required
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
@login_required
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
| <commit_before>from django.shortcuts import get_object_or_404
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
<commit_msg>Make sure user in logged in for showing invoice<commit_after>from django.shortcuts import get_object_or_404
from django.contrib.auth.decorators import login_required
from invoice.models import Invoice
from invoice.pdf import draw_pdf
from invoice.utils import pdf_response
def pdf_view(request, pk):
invoice = get_object_or_404(Invoice, pk=pk)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
@login_required
def pdf_user_view(request, invoice_id):
invoice = get_object_or_404(Invoice, invoice_id=invoice_id, user=request.user)
return pdf_response(draw_pdf, invoice.file_name(), invoice)
|
4e40575147fd9af02c0e0a380e4d35f6c5d8f67a | polling_stations/apps/data_collection/management/commands/import_breckland.py | polling_stations/apps/data_collection/management/commands/import_breckland.py | from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
| from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
    council_id = 'E07000143'
    addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
    stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
    elections = [
        'local.norfolk.2017-05-04',
        'parl.2017-06-08'
    ]
    csv_delimiter = '\t'

    # Polling place ids whose supplied grid references are obviously wrong.
    BAD_POINT_IDS = ('5151', '5370', '5418', '5319')

    def station_record_to_dict(self, record):
        """Zero out known-bad eastings/northings so the importer falls
        back to geocoding instead of trusting the supplied points."""
        if record.pollingplaceid in self.BAD_POINT_IDS:
            record = record._replace(
                pollingplaceeasting='0',
                pollingplacenorthing='0',
            )
        return super().station_record_to_dict(record)
| Discard dodgy points in Breckland | Discard dodgy points in Breckland
These are clearly very wrong
use geocoding instead
| Python | bsd-3-clause | chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations | from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
Discard dodgy points in Breckland
These are clearly very wrong
use geocoding instead | from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
def station_record_to_dict(self, record):
"""
File supplied contained obviously inaccurate points
remove and fall back to geocoding
"""
if record.pollingplaceid in ['5151', '5370', '5418', '5319']:
record = record._replace(pollingplaceeasting = '0')
record = record._replace(pollingplacenorthing = '0')
return super().station_record_to_dict(record)
| <commit_before>from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
<commit_msg>Discard dodgy points in Breckland
These are clearly very wrong
use geocoding instead<commit_after> | from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
def station_record_to_dict(self, record):
"""
File supplied contained obviously inaccurate points
remove and fall back to geocoding
"""
if record.pollingplaceid in ['5151', '5370', '5418', '5319']:
record = record._replace(pollingplaceeasting = '0')
record = record._replace(pollingplacenorthing = '0')
return super().station_record_to_dict(record)
| from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
Discard dodgy points in Breckland
These are clearly very wrong
use geocoding insteadfrom data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
def station_record_to_dict(self, record):
"""
File supplied contained obviously inaccurate points
remove and fall back to geocoding
"""
if record.pollingplaceid in ['5151', '5370', '5418', '5319']:
record = record._replace(pollingplaceeasting = '0')
record = record._replace(pollingplacenorthing = '0')
return super().station_record_to_dict(record)
| <commit_before>from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000143'
addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
elections = [
'local.norfolk.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
<commit_msg>Discard dodgy points in Breckland
These are clearly very wrong
use geocoding instead<commit_after>from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
    council_id = 'E07000143'
    addresses_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
    stations_name = 'May 2017/BrecklandPropertyPostCodePollingStationWebLookup-2017-02-20.TSV'
    elections = [
        'local.norfolk.2017-05-04',
        'parl.2017-06-08'
    ]
    csv_delimiter = '\t'

    def station_record_to_dict(self, record):
        """
        Zero out the grid reference for stations whose supplied points
        were obviously inaccurate, so the importer falls back to
        geocoding for them instead.
        """
        dodgy_station_ids = ('5151', '5370', '5418', '5319')
        if record.pollingplaceid in dodgy_station_ids:
            # ``record`` is a namedtuple; one _replace covers both fields.
            record = record._replace(
                pollingplaceeasting='0',
                pollingplacenorthing='0',
            )
        return super().station_record_to_dict(record)
|
31168c729ac486e6a0705e9c5117586dfb964cf7 | importer/loaders.py | importer/loaders.py | from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
| from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
| Move sheet_by_name after expected interfaces. | Move sheet_by_name after expected interfaces.
| Python | mit | monokrome/django-drift | from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
Move sheet_by_name after expected interfaces. | from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
| <commit_before>from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
<commit_msg>Move sheet_by_name after expected interfaces.<commit_after> | from django.conf import settings
import xlrd
import os
# Message raised by the abstract ``Loader`` methods below.
base_loader_error = 'The Loader class can only be used by extending it.'
# Mapping of loader type name -> recognised file extensions; may be
# overridden through the ``IMPORTER_EXTENSIONS`` Django setting.
extensions = getattr(
    settings,
    'IMPORTER_EXTENSIONS',
    {
        'excel': ('.xls', '.xlsx'),
    }
)
class Loader(object):
    """Abstract base class for importer file loaders.

    Subclasses must implement :meth:`open` and :meth:`sniff`.
    """
    def __init__(self, file_info, autoload=True):
        # ``file_info`` is expected to expose a ``path`` attribute
        # (e.g. a Django file/field-file-like object).
        self.filename = file_info.path
        if autoload is True:
            # NOTE: propagates whatever ``open()`` returns (normally None).
            return self.open()
    def open(self):
        """Open the underlying file; must be overridden by subclasses."""
        raise NotImplementedError(base_loader_error)
    def close(self):
        """Release any held resources; default is a no-op."""
        pass
    @classmethod
    def sniff(cls, file_info):
        """Return whether this loader can handle ``file_info``; must be overridden."""
        raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
    """Loader for Excel workbooks, backed by ``xlrd``."""
    supports_sheets = True
    type_name = 'excel'
    def open(self):
        """Open the workbook and cache its sheet names and count."""
        self.backend = xlrd.open_workbook(self.filename)
        self.sheet_names = self.backend.sheet_names()
        self.sheet_count = len(self.sheet_names)
    def close(self):
        """Free the memory held by the loaded workbook."""
        self.backend.release_resources()
    @classmethod
    def sniff(cls, file_info):
        """Return True if ``file_info`` looks like an Excel file.

        Currently decided by file extension alone.
        """
        # TODO: Find a way to really sniff the file.
        if 'excel' not in extensions:
            return False
        return os.path.splitext(file_info.path)[-1] in extensions['excel']
    def sheet_by_name(self, name):
        """Return a sheet based on its name."""
        return self.backend.sheet_by_name(name)
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
    """Loader for CSV data -- still a stub; ``open``/``sniff`` are inherited
    abstract methods and will raise until implemented."""
    supports_sheets = False
| from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
Move sheet_by_name after expected interfaces.from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
| <commit_before>from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
<commit_msg>Move sheet_by_name after expected interfaces.<commit_after>from django.conf import settings
import xlrd
import os
base_loader_error = 'The Loader class can only be used by extending it.'
extensions = getattr(
settings,
'IMPORTER_EXTENSIONS',
{
'excel': ('.xls', '.xlsx'),
}
)
class Loader(object):
def __init__(self, file_info, autoload=True):
self.filename = file_info.path
if autoload is True:
return self.open()
def open(self):
raise NotImplementedError(base_loader_error)
def close(self):
pass
@classmethod
def sniff(cls, file_info):
raise NotImplementedError(base_loader_error)
class ExcelLoader(Loader):
supports_sheets = True
type_name = 'excel'
def open(self):
self.backend = xlrd.open_workbook(self.filename)
self.sheet_names = self.backend.sheet_names()
self.sheet_count = len(self.sheet_names)
def close(self):
self.backend.release_resources()
@classmethod
def sniff(cls, file_info):
# TODO: Find a way to really sniff the file.
if not 'excel' in extensions: return False
return os.path.splitext(file_info.path)[-1] in extensions['excel']
def sheet_by_name(self, name):
""" Returns a sheet based on it's name. """
return self.backend.sheet_by_name(name)
# TODO: Finish Loader for importing from CSV data.
class CSVLoader(Loader):
supports_sheets = False
|
4a20a36aa920a6450eb526a9913d8fb0ab08fa8c | buildlet/runner/simple.py | buildlet/runner/simple.py | from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
task.pre_run()
try:
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
| from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
# .. note:: Do *not* put ``cls.run(parent)`` in the next try
# block because the error in parent task is treated by its
# `post_error_run` hook.
task.pre_run()
try:
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
| Tweak SimpleRunner.run: make it close to the parallel one | Tweak SimpleRunner.run: make it close to the parallel one
| Python | bsd-3-clause | tkf/buildlet | from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
task.pre_run()
try:
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
Tweak SimpleRunner.run: make it close to the parallel one | from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
# .. note:: Do *not* put ``cls.run(parent)`` in the next try
# block because the error in parent task is treated by its
# `post_error_run` hook.
task.pre_run()
try:
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
| <commit_before>from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
task.pre_run()
try:
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
<commit_msg>Tweak SimpleRunner.run: make it close to the parallel one<commit_after> | from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
# .. note:: Do *not* put ``cls.run(parent)`` in the next try
# block because the error in parent task is treated by its
# `post_error_run` hook.
task.pre_run()
try:
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
| from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
task.pre_run()
try:
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
Tweak SimpleRunner.run: make it close to the parallel onefrom .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
# .. note:: Do *not* put ``cls.run(parent)`` in the next try
# block because the error in parent task is treated by its
# `post_error_run` hook.
task.pre_run()
try:
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
| <commit_before>from .base import BaseRunner
class SimpleRunner(BaseRunner):
"""
Simple blocking task runner.
"""
@classmethod
def run(cls, task):
"""
Simple blocking task runner.
Run `task` and its unfinished ancestors.
"""
task.pre_run()
try:
for parent in task.get_parents():
# This is redundant because `.load` or `.run` is called
# for *all* tasks regardless the state (need rerun or not).
cls.run(parent)
if task.is_finished():
task.load()
else:
task.run()
task.post_success_run()
except Exception as e:
task.post_error_run(e)
raise
<commit_msg>Tweak SimpleRunner.run: make it close to the parallel one<commit_after>from .base import BaseRunner
class SimpleRunner(BaseRunner):
    """
    Simple blocking task runner.
    """
    @classmethod
    def run(cls, task):
        """
        Simple blocking task runner.
        Run `task` and its unfinished ancestors.
        """
        # Recurse into every parent first.  This is redundant work,
        # since `.load` or `.run` ends up being invoked for *all* tasks
        # regardless of whether they actually need a rerun.
        #
        # .. note:: The recursion deliberately stays *outside* the try
        #    block below: an error inside a parent task is handled by
        #    that parent's own `post_error_run` hook.
        for ancestor in task.get_parents():
            cls.run(ancestor)
        task.pre_run()
        try:
            task.load() if task.is_finished() else task.run()
            task.post_success_run()
        except Exception as exc:
            task.post_error_run(exc)
            raise
|
7a1a90cbaba73da44efeaf385865519cfa078a6c | astropy/vo/samp/tests/test_hub_script.py | astropy/vo/samp/tests/test_hub_script.py | import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
| import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def setup_function(function):
function.sys_argv_orig = sys.argv
sys.argv = ["samp_hub"]
def teardown_function(function):
sys.argv = function.sys_argv_orig
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
| Fix isolation of SAMP hub script test. | Fix isolation of SAMP hub script test. | Python | bsd-3-clause | StuartLittlefair/astropy,dhomeier/astropy,saimn/astropy,funbaker/astropy,larrybradley/astropy,MSeifert04/astropy,kelle/astropy,lpsinger/astropy,bsipocz/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,lpsinger/astropy,lpsinger/astropy,astropy/astropy,funbaker/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,aleksandr-bakanov/astropy,tbabej/astropy,lpsinger/astropy,pllim/astropy,tbabej/astropy,stargaser/astropy,StuartLittlefair/astropy,joergdietrich/astropy,astropy/astropy,dhomeier/astropy,stargaser/astropy,AustereCuriosity/astropy,larrybradley/astropy,bsipocz/astropy,tbabej/astropy,StuartLittlefair/astropy,kelle/astropy,astropy/astropy,larrybradley/astropy,joergdietrich/astropy,mhvk/astropy,tbabej/astropy,funbaker/astropy,mhvk/astropy,stargaser/astropy,astropy/astropy,DougBurke/astropy,DougBurke/astropy,saimn/astropy,mhvk/astropy,funbaker/astropy,astropy/astropy,MSeifert04/astropy,pllim/astropy,joergdietrich/astropy,bsipocz/astropy,larrybradley/astropy,AustereCuriosity/astropy,stargaser/astropy,saimn/astropy,bsipocz/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,joergdietrich/astropy,pllim/astropy,MSeifert04/astropy,saimn/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,DougBurke/astropy,tbabej/astropy,saimn/astropy,kelle/astropy,kelle/astropy,pllim/astropy,larrybradley/astropy,mhvk/astropy,pllim/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,mhvk/astropy,joergdietrich/astropy,kelle/astropy | import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
Fix isolation of SAMP hub script test. | import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def setup_function(function):
function.sys_argv_orig = sys.argv
sys.argv = ["samp_hub"]
def teardown_function(function):
sys.argv = function.sys_argv_orig
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
| <commit_before>import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
<commit_msg>Fix isolation of SAMP hub script test.<commit_after> | import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def setup_function(function):
function.sys_argv_orig = sys.argv
sys.argv = ["samp_hub"]
def teardown_function(function):
sys.argv = function.sys_argv_orig
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
| import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
Fix isolation of SAMP hub script test.import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def setup_function(function):
function.sys_argv_orig = sys.argv
sys.argv = ["samp_hub"]
def teardown_function(function):
sys.argv = function.sys_argv_orig
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
| <commit_before>import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
ALLOW_INTERNET.set(False)
def test_hub_script():
sys.argv.append('-m') # run in multiple mode
sys.argv.append('-w') # disable web profile
hub_script(timeout=3)
<commit_msg>Fix isolation of SAMP hub script test.<commit_after>import sys
from ..hub_script import hub_script
from ..utils import ALLOW_INTERNET
def setup_module(module):
    # Tests in this module must never touch the network.
    ALLOW_INTERNET.set(False)
def setup_function(function):
    """Swap in a pristine argv, stashing the real one on ``function``."""
    function.sys_argv_orig, sys.argv = sys.argv, ["samp_hub"]
def teardown_function(function):
    # Restore the argv that was stashed on the test function.
    sys.argv = function.sys_argv_orig
def test_hub_script():
    """Briefly run the hub in multiple mode with the web profile off."""
    sys.argv += ['-m', '-w']  # multiple mode; web profile disabled
    hub_script(timeout=3)
|
44749393191aebf730c4dca17766fbdb713e636b | systempay/app.py | systempay/app.py | from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
# View for using PayPal as a payment method
# url(r'^handle-ipn/', self.redirect_view.as_view(as_payment_method=True),
# name='systempay-direct-payment'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
| from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
    """Oscar application wiring up the SystemPay checkout views."""
    name = 'systempay'
    # Views are class attributes so deployments can override them.
    place_order_view = views.PlaceOrderView
    cancel_response_view = views.CancelResponseView
    secure_redirect_view = views.SecureRedirectView
    handle_ipn_view = views.HandleIPN
    def __init__(self, *args, **kwargs):
        # NOTE(review): this __init__ only delegates to the parent and
        # looks removable -- confirm nothing relies on its presence.
        super(SystemPayApplication, self).__init__(*args, **kwargs)
    def get_urls(self):
        """Return this app's URL patterns appended to the parent's."""
        urlpatterns = super(SystemPayApplication, self).get_urls()
        urlpatterns += patterns('',
            url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
            url(r'^preview/', self.place_order_view.as_view(preview=True),
                name='preview'),
            url(r'^cancel/', self.cancel_response_view.as_view(),
                name='cancel-response'),
            url(r'^place-order/', self.place_order_view.as_view(),
                name='place-order'),
            # Server-to-server payment notification endpoint (IPN).
            url(r'^handle-ipn/', self.handle_ipn_view.as_view(),
                name='handle-ipn'),
        )
        return self.post_process_urls(urlpatterns)
# Module-level singleton imported by the project's URLconf.
application = SystemPayApplication()
| Add missing handle ipn url | Add missing handle ipn url
| Python | mit | bastien34/django-oscar-systempay,bastien34/django-oscar-systempay,dulaccc/django-oscar-systempay | from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
# View for using PayPal as a payment method
# url(r'^handle-ipn/', self.redirect_view.as_view(as_payment_method=True),
# name='systempay-direct-payment'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
Add missing handle ipn url | from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
handle_ipn_view = views.HandleIPN
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
url(r'^handle-ipn/', self.handle_ipn_view.as_view(),
name='handle-ipn'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
| <commit_before>from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
# View for using PayPal as a payment method
# url(r'^handle-ipn/', self.redirect_view.as_view(as_payment_method=True),
# name='systempay-direct-payment'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
<commit_msg>Add missing handle ipn url<commit_after> | from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
handle_ipn_view = views.HandleIPN
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
url(r'^handle-ipn/', self.handle_ipn_view.as_view(),
name='handle-ipn'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
| from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
# View for using PayPal as a payment method
# url(r'^handle-ipn/', self.redirect_view.as_view(as_payment_method=True),
# name='systempay-direct-payment'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
Add missing handle ipn urlfrom django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
handle_ipn_view = views.HandleIPN
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
url(r'^handle-ipn/', self.handle_ipn_view.as_view(),
name='handle-ipn'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
| <commit_before>from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
# View for using PayPal as a payment method
# url(r'^handle-ipn/', self.redirect_view.as_view(as_payment_method=True),
# name='systempay-direct-payment'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
<commit_msg>Add missing handle ipn url<commit_after>from django.conf.urls import patterns, url
from oscar.core.application import Application
from systempay import views
class SystemPayApplication(Application):
name = 'systempay'
place_order_view = views.PlaceOrderView
cancel_response_view = views.CancelResponseView
secure_redirect_view = views.SecureRedirectView
handle_ipn_view = views.HandleIPN
def __init__(self, *args, **kwargs):
super(SystemPayApplication, self).__init__(*args, **kwargs)
def get_urls(self):
urlpatterns = super(SystemPayApplication, self).get_urls()
urlpatterns += patterns('',
url(r'^secure-redirect/', self.secure_redirect_view.as_view(), name='secure-redirect'),
url(r'^preview/', self.place_order_view.as_view(preview=True),
name='preview'),
url(r'^cancel/', self.cancel_response_view.as_view(),
name='cancel-response'),
url(r'^place-order/', self.place_order_view.as_view(),
name='place-order'),
url(r'^handle-ipn/', self.handle_ipn_view.as_view(),
name='handle-ipn'),
)
return self.post_process_urls(urlpatterns)
application = SystemPayApplication()
|
3146aa3379f5c928fc7ba79776de19a1c64c7c01 | api/base/urls.py | api/base/urls.py | from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^v2/collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| Make collections link go to v2 route | Make collections link go to v2 route
| Python | apache-2.0 | alexschiller/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,emetsger/osf.io,zachjanicki/osf.io,aaxelb/osf.io,binoculars/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,billyhunt/osf.io,doublebits/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,doublebits/osf.io,chrisseto/osf.io,abought/osf.io,mluke93/osf.io,crcresearch/osf.io,kwierman/osf.io,billyhunt/osf.io,mluke93/osf.io,leb2dg/osf.io,caseyrygt/osf.io,icereval/osf.io,alexschiller/osf.io,Nesiehr/osf.io,mattclark/osf.io,kwierman/osf.io,erinspace/osf.io,laurenrevere/osf.io,saradbowman/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,caneruguz/osf.io,amyshi188/osf.io,mfraezz/osf.io,wearpants/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,cslzchen/osf.io,mluke93/osf.io,adlius/osf.io,GageGaskins/osf.io,acshi/osf.io,emetsger/osf.io,adlius/osf.io,abought/osf.io,felliott/osf.io,brandonPurvis/osf.io,wearpants/osf.io,caseyrollins/osf.io,rdhyee/osf.io,alexschiller/osf.io,caseyrygt/osf.io,crcresearch/osf.io,abought/osf.io,samanehsan/osf.io,SSJohns/osf.io,amyshi188/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,amyshi188/osf.io,caseyrollins/osf.io,sloria/osf.io,pattisdr/osf.io,jnayak1/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,doublebits/osf.io,zamattiac/osf.io,baylee-d/osf.io,erinspace/osf.io,kch8qx/osf.io,cwisecarver/osf.io,jnayak1/osf.io,felliott/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,mluo613/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,caseyrygt/osf.io,kwierman/osf.io,pattisdr/osf.io,hmoco/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,hmoco/osf.io,danielneis/osf.io,samchrisinger/osf.io,ZobairAlijan/osf.io,chennan47/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,mluke93/osf.io,mluo613/osf.io,mluo613/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,mfraezz/osf.io,sloria/osf.io,icereval/osf.io,ticklemepierce/osf.io,wearpants/osf.io,felliott/osf.io,rdhy
ee/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,ticklemepierce/osf.io,doublebits/osf.io,acshi/osf.io,KAsante95/osf.io,KAsante95/osf.io,jnayak1/osf.io,danielneis/osf.io,zamattiac/osf.io,hmoco/osf.io,alexschiller/osf.io,RomanZWang/osf.io,samchrisinger/osf.io,billyhunt/osf.io,KAsante95/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,zamattiac/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,acshi/osf.io,TomHeatwole/osf.io,mluo613/osf.io,acshi/osf.io,sloria/osf.io,zamattiac/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,asanfilippo7/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,SSJohns/osf.io,baylee-d/osf.io,jnayak1/osf.io,kch8qx/osf.io,leb2dg/osf.io,billyhunt/osf.io,RomanZWang/osf.io,felliott/osf.io,SSJohns/osf.io,caseyrygt/osf.io,kwierman/osf.io,samchrisinger/osf.io,mattclark/osf.io,caneruguz/osf.io,GageGaskins/osf.io,zachjanicki/osf.io,ZobairAlijan/osf.io,adlius/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,chrisseto/osf.io,GageGaskins/osf.io,wearpants/osf.io,alexschiller/osf.io,erinspace/osf.io,ZobairAlijan/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,chennan47/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,leb2dg/osf.io,samanehsan/osf.io,zachjanicki/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,chennan47/osf.io,amyshi188/osf.io,caneruguz/osf.io,emetsger/osf.io,RomanZWang/osf.io,mfraezz/osf.io,aaxelb/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,Ghalko/osf.io,leb2dg/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,acshi/osf.io,GageGaskins/osf.io,baylee-d/osf.io,emetsger/osf.io,TomHeatwole/osf.io,cslzchen/osf.io,mattclark/osf.io,icereval/osf.io,monikagrabowska/osf.io,caseyrollins/osf.io,danielneis/osf.io,abought/osf.io,Ghalko/osf.io,KAsante95/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,binoculars/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,kc
h8qx/osf.io,mluo613/osf.io,binoculars/osf.io,samchrisinger/osf.io,adlius/osf.io,rdhyee/osf.io,chrisseto/osf.io,Nesiehr/osf.io,doublebits/osf.io,rdhyee/osf.io | from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
Make collections link go to v2 route | from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^v2/collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| <commit_before>from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
<commit_msg>Make collections link go to v2 route<commit_after> | from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^v2/collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
Make collections link go to v2 routefrom django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^v2/collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
| <commit_before>from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
<commit_msg>Make collections link go to v2 route<commit_after>from django.conf import settings
from django.conf.urls import include, url, patterns
from django.conf.urls.static import static
from settings import API_BASE
from website.settings import DEV_MODE
from . import views
base_pattern = '^{}'.format(API_BASE)
urlpatterns = [
### API ###
url(base_pattern,
include(patterns('',
url(r'^$', views.root, name='root'),
url(r'^applications/', include('api.applications.urls', namespace='applications')),
url(r'^nodes/', include('api.nodes.urls', namespace='nodes')),
url(r'^users/', include('api.users.urls', namespace='users')),
url(r'^files/', include('api.files.urls', namespace='files')),
url(r'^docs/', include('rest_framework_swagger.urls')),
))
)
]
if DEV_MODE:
urlpatterns.extend([
url(r'^v2/collections/', include('api.collections.urls', namespace='collections')),
])
urlpatterns += static('/static/', document_root=settings.STATIC_ROOT)
handler404 = views.error_404
|
3d7e02e58353fdc3290440344efd5591d233f449 | bot/__init__.py | bot/__init__.py | import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
| import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
| Make the flask app easier to import | Make the flask app easier to import
| Python | mit | barentsen/AstroGoldStars,barentsen/AstroGoldStars | import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
Make the flask app easier to import | import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
| <commit_before>import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
<commit_msg>Make the flask app easier to import<commit_after> | import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
| import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
Make the flask app easier to importimport os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
| <commit_before>import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
<commit_msg>Make the flask app easier to import<commit_after>import os
PACKAGEDIR = os.path.abspath(os.path.dirname(__file__))
from .webapp import goldstarsapp
|
a53b0f540b8aac87f96743bf4d383607ad5a398f | setup.py | setup.py | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
| import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9,<2.0','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
| Fix install_requires to limit Django version <2.0. | Fix install_requires to limit Django version <2.0.
| Python | bsd-2-clause | ethoms/django-djaken,ethoms/django-djaken | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
Fix install_requires to limit Django version <2.0. | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9,<2.0','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
| <commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
<commit_msg>Fix install_requires to limit Django version <2.0.<commit_after> | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9,<2.0','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
| import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
Fix install_requires to limit Django version <2.0.import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9,<2.0','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
| <commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
<commit_msg>Fix install_requires to limit Django version <2.0.<commit_after>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-djaken',
version='2.0.1',
packages=['djaken'],
include_package_data=True,
license='BSD License',
description='Djaken is a complete web-based notes application for Django.',
long_description=README,
url='https://github.com/ethoms/django-djaken/',
author='Euan Thoms',
author_email='euan@potensol.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django>=1.9,<2.0','docutils>=0.12', 'pycrypto>=2.6', ],
keywords='django notes markdown encrypt',
)
|
b946768acb8c9e34dbb72cb6d3bc33a7e67f4548 | setup.py | setup.py | from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
| from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
| Add download url and long description | Add download url and long description
| Python | mit | sashgorokhov/python-ninegag | from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
Add download url and long description | from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
| <commit_before>from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
<commit_msg>Add download url and long description<commit_after> | from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
| from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
Add download url and long descriptionfrom distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
| <commit_before>from distutils.core import setup
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
license='MIT',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
<commit_msg>Add download url and long description<commit_after>from distutils.core import setup
with open('README.md') as file:
long_description = file.read()
setup(
requires=['beautifulsoup4', 'requests'],
name='python-ninegag',
version='0.1',
py_modules=['pyninegag'],
url='https://github.com/sashgorokhov/python-ninegag',
download_url='https://github.com/sashgorokhov/python-ninegag/archive/master.zip',
long_description=long_description,
license='MIT License',
author='sashgorokhov',
author_email='sashgorokhov@gmail.com',
description='Python library to get stuff from 9gag.com'
)
|
c136d5897a9c36cf5c4eca1d9a8c0be960ead730 | setup.py | setup.py | from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
| from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
python_requires=">=3.4",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
| Enforce requirement on python >= 3.4 | Enforce requirement on python >= 3.4
| Python | mit | axsemantics/rohrpost,axsemantics/rohrpost | from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
Enforce requirement on python >= 3.4 | from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
python_requires=">=3.4",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
| <commit_before>from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
<commit_msg>Enforce requirement on python >= 3.4<commit_after> | from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
python_requires=">=3.4",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
| from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
Enforce requirement on python >= 3.4from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
python_requires=">=3.4",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
| <commit_before>from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
<commit_msg>Enforce requirement on python >= 3.4<commit_after>from setuptools import setup
from rohrpost import __version__
def read(filepath):
with open(filepath, "r", encoding="utf-8") as f:
return f.read()
setup(
name="rohrpost",
version=__version__,
description="rohrpost WebSocket protocol for ASGI",
long_description=read("README.rst"),
url="https://github.com/axsemantics/rohrpost",
author="Tobias Kunze",
author_email="tobias.kunze@ax-semantics.com",
license="MIT",
python_requires=">=3.4",
classifiers=[
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["rohrpost"],
)
|
86d17b1898b5654aeeb432722dcb599136376236 | setup.py | setup.py | # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.0',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
},
)
| # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.1',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
install_requires=['doit'],
)
| Allow pip to decide which version of doit to install | Allow pip to decide which version of doit to install
| Python | mit | rbeagrie/wrapit | # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.0',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
},
)
Allow pip to decide which version of doit to install | # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.1',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
install_requires=['doit'],
)
| <commit_before># Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.0',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
},
)
<commit_msg>Allow pip to decide which version of doit to install<commit_after> | # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.1',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
install_requires=['doit'],
)
| # Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.0',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
},
)
Allow pip to decide which version of doit to install# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.1',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
install_requires=['doit'],
)
| <commit_before># Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.0',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
extras_require={
':python_version<"3.0"': ['doit==0.29.0'],
':python_version>="3.0"': ['doit==0.30.0'],
},
)
<commit_msg>Allow pip to decide which version of doit to install<commit_after># Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='wrapit',
version='0.3.1',
description='A task loader for doit that supports argparse console scripts',
long_description=long_description,
url='https://github.com/rbeagrie/wrapit',
author='Rob Beagrie',
author_email='rob@beagrie.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='doit development console_scripts build_tools',
packages=['wrapit'],
install_requires=['doit'],
)
|
a6d87b6e4dba63b0a74dc6173e90823bdb9fe070 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
| from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
| Add install_requires packages, at version 0.1.17 | Add install_requires packages, at version 0.1.17
| Python | mit | MBARIMike/biofloat,MBARIMike/biofloat,biofloat/biofloat,biofloat/biofloat | from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
Add install_requires packages, at version 0.1.17 | from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
| <commit_before>from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
<commit_msg>Add install_requires packages, at version 0.1.17<commit_after> | from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
| from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
Add install_requires packages, at version 0.1.17from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
| <commit_before>from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.1",
packages = find_packages(),
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
<commit_msg>Add install_requires packages, at version 0.1.17<commit_after>from setuptools import setup, find_packages
setup(
name = "biofloat",
version = "0.1.17",
packages = find_packages(),
requires = ['Python (>=2.7)'],
install_requires = [
'beautifulsoup4>=4.4',
'coverage>=4',
'jupyter>=1.0.0',
'matplotlib',
'numpy>=1.10',
'pandas>=0.17',
'Pydap',
'requests>=2.8',
'seawater>=3.3',
'simpletable>=0.2',
'xray>=0.6'
],
scripts = ['scripts/load_biofloat_cache.py'],
# metadata for upload to PyPI
author = "Mike McCann",
author_email = "mccann@mbari.org",
description = "Software for working with data from Bio-Argo floats",
license = "MIT",
keywords = "Oceanography Argo Bio-Argo drifting buoys floats",
url = "https://github.com/biofloat/biofloat",
)
|
5db334fac528494006755703d4563fd08dbfc4b2 | setup.py | setup.py | import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel.yaml >= 0.15.18",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
| import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel_yaml >= 0.11.14",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
| Switch to ruamel_yaml from conda by default | Switch to ruamel_yaml from conda by default
| Python | apache-2.0 | BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex | import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel.yaml >= 0.15.18",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
Switch to ruamel_yaml from conda by default | import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel_yaml >= 0.11.14",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
| <commit_before>import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel.yaml >= 0.15.18",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
<commit_msg>Switch to ruamel_yaml from conda by default<commit_after> | import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel_yaml >= 0.11.14",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
| import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel.yaml >= 0.15.18",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
Switch to ruamel_yaml from conda by defaultimport sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel_yaml >= 0.11.14",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
| <commit_before>import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel.yaml >= 0.15.18",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
<commit_msg>Switch to ruamel_yaml from conda by default<commit_after>import sys
from setuptools import setup
# See https://stackoverflow.com/questions/19534896/enforcing-python-version-in-setup-py
if sys.version_info < (3,6):
sys.exit("Sorry, Python < 3.6 is not supported by Auspex.")
setup(
name='auspex',
version='0.1',
author='auspex Developers',
package_dir={'':'src'},
packages=[
'auspex', 'auspex.instruments', 'auspex.filters', 'auspex.analysis'
],
scripts=[],
description='Automated system for python-based experiments.',
long_description=open('README.md').read(),
install_requires=[
"numpy >= 1.11.1",
"scipy >= 0.17.1",
"PyVISA >= 1.8",
"h5py >= 2.6.0",
"tqdm >= 4.7.0",
"pandas >= 0.18.1",
"networkx >= 1.11",
"matplotlib >= 2.0.0",
"ruamel_yaml >= 0.11.14",
"psutil >= 5.0.0",
"pyzmq >= 16.0.0"
]
)
|
17c18a610ba0fe35d88ffb4ca1b6d0c91ae4fad6 | setup.py | setup.py | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson congitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson cognitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) | Fix incredibly lame spelling error. | Fix incredibly lame spelling error.
| Python | mit | ResidentMario/watson-graph,ResidentMario/watsongraph | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson congitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
)Fix incredibly lame spelling error. | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson cognitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) | <commit_before>from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson congitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
)<commit_msg>Fix incredibly lame spelling error.<commit_after> | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson cognitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) | from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson congitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
)Fix incredibly lame spelling error.from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson cognitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) | <commit_before>from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson congitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
)<commit_msg>Fix incredibly lame spelling error.<commit_after>from distutils.core import setup
setup(
name = 'watsongraph',
packages = ['watsongraph'], # this must be the same as the name above
install_requires=['networkx', 'requests', 'mwviews'],
version = '0.1.5',
description = 'Concept discovery and recommendation library built on top of the IBM Watson cognitive API.',
author = 'Aleksey Bilogur',
author_email = 'aleksey.bilogur@gmail.com',
url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
download_url = 'https://github.com/ResidentMario/watsongraph/tarball/0.1.5',
keywords = ['graph', 'networks', 'ibm watson', 'ibm', 'recommendation'], # arbitrary keywords
classifiers = [],
) |
11d1d3bc78ef734584065cc8fba9260da0b49bac | setup.py | setup.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0.dev0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
| Use regular version of openfisca-core | Use regular version of openfisca-core | Python | agpl-3.0 | openfisca/country-template,openfisca/country-template | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0.dev0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
Use regular version of openfisca-core | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0.dev0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
<commit_msg>Use regular version of openfisca-core<commit_after> | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
| #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0.dev0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
Use regular version of openfisca-core#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
| <commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0.dev0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
<commit_msg>Use regular version of openfisca-core<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name='OpenFisca-Country-Template',
version='0.1.0',
author='OpenFisca Team',
author_email='contact@openfisca.fr',
description=u'Template of a tax and benefit system for OpenFisca',
keywords='benefit microsimulation social tax',
license='http://www.fsf.org/licensing/licenses/agpl-3.0.html',
include_package_data = True, # Will read MANIFEST.in
install_requires=[
'OpenFisca-Core >= 6.1.0, < 7.0',
],
packages=find_packages(),
test_suite='nose.collector',
)
|
330bdcb3bc50acf4c15299a15fb7fdf8b58e152c | setup.py | setup.py | #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
| #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Bump (pin/manage) your dependency requirements with ease',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/bumper',
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
| Add long description / url | Add long description / url
| Python | mit | maxzheng/bumper | #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
Add long description / url | #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Bump (pin/manage) your dependency requirements with ease',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/bumper',
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
| <commit_before>#!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
<commit_msg>Add long description / url<commit_after> | #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Bump (pin/manage) your dependency requirements with ease',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/bumper',
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
| #!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
Add long description / url#!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Bump (pin/manage) your dependency requirements with ease',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/bumper',
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
| <commit_before>#!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description=open('README.rst').read(),
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development :: Development Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
<commit_msg>Add long description / url<commit_after>#!/usr/bin/env python
import setuptools
setuptools.setup(
name='bumper',
version='0.1.1',
author='Max Zheng',
author_email='maxzheng.os @t gmail.com',
description='Bump (pin/manage) your dependency requirements with ease',
long_description=open('README.rst').read(),
url='https://github.com/maxzheng/bumper',
entry_points={
'console_scripts': [
'bump = bumper:bump',
],
},
install_requires=open('requirements.txt').read(),
license='MIT',
setup_requires=['setuptools-git'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Software Development',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='bump pin requirements requirements.txt pinned.txt',
)
|
e68ca88db996e817d607f51a25fabb47c8cf685c | setup.py | setup.py | import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
| import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
| Drop Python 2.5 support, add support for Python 3.2 | Drop Python 2.5 support, add support for Python 3.2
| Python | isc | gears/gears-less,gears/gears-less | import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Drop Python 2.5 support, add support for Python 3.2 | import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Drop Python 2.5 support, add support for Python 3.2<commit_after> | import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
| import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Drop Python 2.5 support, add support for Python 3.2import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
| <commit_before>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Drop Python 2.5 support, add support for Python 3.2<commit_after>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-less',
version='0.1',
url='https://github.com/gears/gears-less',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='LESS compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
9bf381993a6862a3413f5e1d3439ff1301260d62 | setup.py | setup.py | from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license="MIT",
keywords="pycurl multicurl curl network parsing grabbing scraping"
" lxml xpath data mining",
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
| from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license='MIT',
keywords='pycurl multicurl curl network parsing grabbing scraping'
' lxml xpath data mining',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
| Convert double quotes to single quotes | Convert double quotes to single quotes
| Python | mit | lorien/grab,lorien/grab,istinspring/grab,istinspring/grab | from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license="MIT",
keywords="pycurl multicurl curl network parsing grabbing scraping"
" lxml xpath data mining",
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
Convert double quotes to single quotes | from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license='MIT',
keywords='pycurl multicurl curl network parsing grabbing scraping'
' lxml xpath data mining',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
| <commit_before>from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license="MIT",
keywords="pycurl multicurl curl network parsing grabbing scraping"
" lxml xpath data mining",
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
<commit_msg>Convert double quotes to single quotes<commit_after> | from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license='MIT',
keywords='pycurl multicurl curl network parsing grabbing scraping'
' lxml xpath data mining',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
| from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license="MIT",
keywords="pycurl multicurl curl network parsing grabbing scraping"
" lxml xpath data mining",
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
Convert double quotes to single quotesfrom setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license='MIT',
keywords='pycurl multicurl curl network parsing grabbing scraping'
' lxml xpath data mining',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
| <commit_before>from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license="MIT",
keywords="pycurl multicurl curl network parsing grabbing scraping"
" lxml xpath data mining",
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
<commit_msg>Convert double quotes to single quotes<commit_after>from setuptools import setup, find_packages
import os
ROOT = os.path.dirname(os.path.realpath(__file__))
setup(
name='grab',
version='0.6.30',
description='Web Scraping Framework',
long_description=open(os.path.join(ROOT, 'README.rst')).read(),
url='http://grablib.org',
author='Gregory Petukhov',
author_email='lorien@lorien.name',
packages=find_packages(exclude=['test', 'test.files']),
install_requires=['lxml', 'pycurl', 'selection', 'weblib>=0.1.10', 'six',
'user_agent'],
license='MIT',
keywords='pycurl multicurl curl network parsing grabbing scraping'
' lxml xpath data mining',
classifiers=(
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'License :: OSI Approved :: MIT License',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet :: WWW/HTTP',
),
)
|
34de8fbae91b367ed1ada417c5d3cea669ccc1f3 | setup.py | setup.py | import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 2 - Pre-Alpha"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
| import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 4 - Beta"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
| Move classifier from Pre-Alpha to Beta | Move classifier from Pre-Alpha to Beta
| Python | bsd-3-clause | blizzardwarriorwx/SHARPpy,blizzardwarriorwx/SHARPpy,scollis/SHARPpy,scollis/SHARPpy,djgagne/SHARPpy,djgagne/SHARPpy,djgagne/SHARPpy,scollis/SHARPpy,djgagne/SHARPpy,blizzardwarriorwx/SHARPpy,blizzardwarriorwx/SHARPpy,scollis/SHARPpy | import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 2 - Pre-Alpha"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
Move classifier from Pre-Alpha to Beta | import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 4 - Beta"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
| <commit_before>import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 2 - Pre-Alpha"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
<commit_msg>Move classifier from Pre-Alpha to Beta<commit_after> | import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 4 - Beta"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
| import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 2 - Pre-Alpha"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
Move classifier from Pre-Alpha to Betaimport os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 4 - Beta"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
| <commit_before>import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 2 - Pre-Alpha"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
<commit_msg>Move classifier from Pre-Alpha to Beta<commit_after>import os, sys
from setuptools import setup, find_packages
pkgname = "SHARPpy"
### GET VERSION INFORMATION ###
setup_path = os.path.split(os.path.abspath(__file__))[0]
sys.path.append(os.path.join(setup_path, pkgname.lower()))
import version
version.write_git_version()
ver = version.get_version().split("+")[0]
sys.path.pop()
### ACTUAL SETUP VALUES ###
name = pkgname
version = ver
author = "Patrick Marsh, Kelton Halbert, and Greg Blumberg"
author_email = "patrick.marsh@noaa.gov, keltonhalbert@ou.edu, wblumberg@ou.edu"
description = "Sounding/Hodograph Analysis and Research Program for Python"
long_description = ""
license = "BSD"
keywords = "meteorology soundings analysis"
url = "https://github.com/sharppy/SHARPpy"
packages = find_packages()
package_data = {"": ["*.md", "*.txt", "*.png"],}
include_package_data = True
classifiers = ["Development Status :: 4 - Beta"]
setup(
name = name,
version = version,
author = author,
author_email = author_email,
description = description,
long_description = long_description,
license = license,
keywords = keywords,
url = url,
packages = packages,
package_data = package_data,
include_package_data = include_package_data,
classifiers = classifiers
)
|
1eba15e0c187b7afca85c43e031ce9e9e926e622 | setup.py | setup.py | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.16',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.permissions',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.17',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.tenants.migrations',
'tenant_users.permissions',
'tenant_users.permissions.migrations',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| Fix so migrations get included with pypi package. Update version | Fix so migrations get included with pypi package. Update version
| Python | mit | Corvia/django-tenant-users | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.16',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.permissions',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Fix so migrations get included with pypi package. Update version | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.17',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.tenants.migrations',
'tenant_users.permissions',
'tenant_users.permissions.migrations',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.16',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.permissions',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Fix so migrations get included with pypi package. Update version<commit_after> | import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.17',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.tenants.migrations',
'tenant_users.permissions',
'tenant_users.permissions.migrations',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.16',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.permissions',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Fix so migrations get included with pypi package. Update versionimport os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.17',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.tenants.migrations',
'tenant_users.permissions',
'tenant_users.permissions.migrations',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.16',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.permissions',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Fix so migrations get included with pypi package. Update version<commit_after>import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name='django-tenant-users',
version='0.2.17',
packages=[
'tenant_users',
'tenant_users.tenants',
'tenant_users.tenants.migrations',
'tenant_users.permissions',
'tenant_users.permissions.migrations',
],
include_package_data=True,
license='MIT License',
description='A Django app to extend django-tenant-schemas to incorporate global multi-tenant users',
long_description=README,
url='https://www.github.com/Corvia/django-tenant-users',
author='Corvia Technologies, LLC',
author_email='support@corvia.tech',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
6c42dbb721feeacdfce182914f86b62dfa7cb3b4 | setup.py | setup.py | from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser')
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
| from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser==3.5.0b2') # Using the beta for PyPy compatibility
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
| Upgrade configparser to fix pip build issue on PyPy. | Upgrade configparser to fix pip build issue on PyPy.
| Python | mpl-2.0 | arusahni/lolologist | from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser')
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
Upgrade configparser to fix pip build issue on PyPy. | from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser==3.5.0b2') # Using the beta for PyPy compatibility
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
| <commit_before>from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser')
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
<commit_msg>Upgrade configparser to fix pip build issue on PyPy.<commit_after> | from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser==3.5.0b2') # Using the beta for PyPy compatibility
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
| from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser')
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
Upgrade configparser to fix pip build issue on PyPy.from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser==3.5.0b2') # Using the beta for PyPy compatibility
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
| <commit_before>from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser')
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
<commit_msg>Upgrade configparser to fix pip build issue on PyPy.<commit_after>from setuptools import setup
import sys
REQUIREMENTS = [
'argparse',
'GitPython>=0.3.2.RC1',
'Pillow>=2.3.0',
'requests',
]
if sys.version_info <= (3,):
REQUIREMENTS.append('configparser==3.5.0b2') # Using the beta for PyPy compatibility
setup(name='lolologist',
version='0.4.0',
description='A utility that generates an image macro from your webcam whenever \
you commit to a git repository.',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'Programming Language :: Python :: 2.6',
'Environment :: Console',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Topic :: Multimedia :: Graphics :: Capture',
'Topic :: Utilities',
'Topic :: Software Development :: Version Control',
],
url='https://github.com/arusahni/lolologist',
author='Aru Sahni',
author_email='arusahni@gmail.com',
license='MPL 2.0',
packages=['lolologist'],
package_data={'lolologist':['LeagueGothic-Regular.otf', 'tranzlator.json']},
include_package_data=True,
install_requires=REQUIREMENTS,
entry_points = {
'console_scripts': ['lolologist=lolologist.lolologist:main'],
},
zip_safe=False)
|
d803647eeb1644985f1788d232a72587ff156bb4 | setup.py | setup.py | from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'Pykka >= 1.1',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| Add Pykka as a dependency | Add Pykka as a dependency
| Python | apache-2.0 | mopidy/mopidy-scrobbler,mthssdrbrg/mopidy-scrobbler | from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
Add Pykka as a dependency | from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'Pykka >= 1.1',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| <commit_before>from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
<commit_msg>Add Pykka as a dependency<commit_after> | from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'Pykka >= 1.1',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
Add Pykka as a dependencyfrom __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'Pykka >= 1.1',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
| <commit_before>from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
<commit_msg>Add Pykka as a dependency<commit_after>from __future__ import unicode_literals
import re
from setuptools import setup
def get_version(filename):
content = open(filename).read()
metadata = dict(re.findall("__([a-z]+)__ = '([^']+)'", content))
return metadata['version']
setup(
name='Mopidy-Scrobbler',
version=get_version('mopidy_scrobbler/__init__.py'),
url='https://github.com/mopidy/mopidy-scrobbler',
license='Apache License, Version 2.0',
author='Stein Magnus Jodal',
author_email='stein.magnus@jodal.no',
description='Mopidy extension for scrobbling played tracks to Last.fm',
long_description=open('README.rst').read(),
packages=['mopidy_scrobbler'],
zip_safe=False,
include_package_data=True,
install_requires=[
'setuptools',
'Mopidy',
'Pykka >= 1.1',
'pylast >= 0.5.7',
],
entry_points={
'mopidy.ext': [
'scrobbler = mopidy_scrobbler:Extension',
],
},
classifiers=[
'Environment :: No Input/Output (Daemon)',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Multimedia :: Sound/Audio :: Players',
],
)
|
b3d9696d02855ba7a3243b4dc10e931f22a587b8 | changes/utils/locking.py | changes/utils/locking.py | from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
| from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
raise
return wrapped
| Raise exception when we fail to grab a redis lock. | Raise exception when we fail to grab a redis lock.
Summary:
I might be missing something but it seems odd that we don't raise an error
when we fail to grab a lock and consequently not run the wrapped function.
We wrap all our tracked tasks with this lock function so failing to grab
the lock will appear as if the task finished successfully.
Reviewers: nate, josiah
Reviewed By: nate, josiah
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D192147
| Python | apache-2.0 | dropbox/changes,dropbox/changes,dropbox/changes,dropbox/changes | from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
Raise exception when we fail to grab a redis lock.
Summary:
I might be missing something but it seems odd that we don't raise an error
when we fail to grab a lock and consequently not run the wrapped function.
We wrap all our tracked tasks with this lock function so failing to grab
the lock will appear as if the task finished successfully.
Reviewers: nate, josiah
Reviewed By: nate, josiah
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D192147 | from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
raise
return wrapped
| <commit_before>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
<commit_msg>Raise exception when we fail to grab a redis lock.
Summary:
I might be missing something but it seems odd that we don't raise an error
when we fail to grab a lock and consequently not run the wrapped function.
We wrap all our tracked tasks with this lock function so failing to grab
the lock will appear as if the task finished successfully.
Reviewers: nate, josiah
Reviewed By: nate, josiah
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D192147<commit_after> | from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
raise
return wrapped
| from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
Raise exception when we fail to grab a redis lock.
Summary:
I might be missing something but it seems odd that we don't raise an error
when we fail to grab a lock and consequently not run the wrapped function.
We wrap all our tracked tasks with this lock function so failing to grab
the lock will appear as if the task finished successfully.
Reviewers: nate, josiah
Reviewed By: nate, josiah
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D192147from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
raise
return wrapped
| <commit_before>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
return wrapped
<commit_msg>Raise exception when we fail to grab a redis lock.
Summary:
I might be missing something but it seems odd that we don't raise an error
when we fail to grab a lock and consequently not run the wrapped function.
We wrap all our tracked tasks with this lock function so failing to grab
the lock will appear as if the task finished successfully.
Reviewers: nate, josiah
Reviewed By: nate, josiah
Subscribers: changesbot, kylec
Differential Revision: https://tails.corp.dropbox.com/D192147<commit_after>from flask import current_app
from functools import wraps
from hashlib import md5
from changes.ext.redis import UnableToGetLock
from changes.config import redis
def lock(func):
@wraps(func)
def wrapped(**kwargs):
key = '{0}:{1}:{2}'.format(
func.__module__,
func.__name__,
md5(
'&'.join('{0}={1}'.format(k, repr(v))
for k, v in sorted(kwargs.iteritems()))
).hexdigest()
)
try:
with redis.lock(key, expire=300, nowait=True):
return func(**kwargs)
except UnableToGetLock:
current_app.logger.warn('Unable to get lock for %s', key)
raise
return wrapped
|
0243af9efb3caf83268af35f9f2d80977e13afb1 | setup.py | setup.py | import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov'],
),
)
| import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
| Add webtest as a dependency. | Add webtest as a dependency.
| Python | bsd-3-clause | morepath/more.static | import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov'],
),
)
Add webtest as a dependency. | import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
| <commit_before>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov'],
),
)
<commit_msg>Add webtest as a dependency.<commit_after> | import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
| import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov'],
),
)
Add webtest as a dependency.import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
| <commit_before>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov'],
),
)
<commit_msg>Add webtest as a dependency.<commit_after>import os
from setuptools import setup, find_packages
long_description = (
open('README.rst').read()
+ '\n' +
open('CHANGES.txt').read())
setup(name='more.static',
version='0.1.dev0',
description="BowerStatic integration for Morepath",
long_description=long_description,
author="Martijn Faassen",
author_email="faassen@startifact.com",
keywords='morepath bowerstatic bower',
license="BSD",
url="http://pypi.python.org/pypi/more.static",
namespace_packages=['more'],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'morepath',
'bowerstatic',
],
extras_require = dict(
test=['pytest >= 2.0',
'pytest-cov',
'WebTest >= 2.0.14'],
),
)
|
684e2894a8c4da327410a8898e63df504780d7f8 | setup.py | setup.py | """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
| """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh',
'fuzzywuzzy',
'python-Levenshtein'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
| Add fuzzywuzzy and python-Levenshtein as depends | Add fuzzywuzzy and python-Levenshtein as depends | Python | mit | TylerKirby/cltk,cltk/cltk,mbevila/cltk,LBenzahia/cltk,diyclassics/cltk,kylepjohnson/cltk,D-K-E/cltk,LBenzahia/cltk,coderbhupendra/cltk,TylerKirby/cltk | """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
Add fuzzywuzzy and python-Levenshtein as depends | """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh',
'fuzzywuzzy',
'python-Levenshtein'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
| <commit_before>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
<commit_msg>Add fuzzywuzzy and python-Levenshtein as depends<commit_after> | """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh',
'fuzzywuzzy',
'python-Levenshtein'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
| """Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
Add fuzzywuzzy and python-Levenshtein as depends"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh',
'fuzzywuzzy',
'python-Levenshtein'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
| <commit_before>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
<commit_msg>Add fuzzywuzzy and python-Levenshtein as depends<commit_after>"""Config for PyPI."""
from setuptools import find_packages
from setuptools import setup
setup(
author='Kyle P. Johnson',
author_email='kyle@kyle-p-johnson.com',
classifiers=[
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: Chinese (Traditional)',
'Natural Language :: English',
'Natural Language :: Greek',
'Natural Language :: Latin',
'Operating System :: POSIX',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Text Processing',
'Topic :: Text Processing :: General',
'Topic :: Text Processing :: Linguistic',
],
description='NLP for the ancient world',
install_requires=['nltk',
'gitpython',
'regex',
'whoosh',
'fuzzywuzzy',
'python-Levenshtein'],
keywords=['nlp', 'nltk', 'greek', 'latin'],
license='MIT',
long_description="The Classical Language Toolkit (CLTK) is a framework for natural language processing for Classical languages.", # pylint: disable=C0301
name='cltk',
packages=find_packages(),
url='https://github.com/cltk/cltk',
version='0.1.32',
zip_safe=True,
test_suite='cltk.tests.test_cltk',
)
|
c7689244b6de2cc9a01568e7cdab543cf8790214 | setup.py | setup.py | import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = ""
except WindowsError:
GIT_REVISION = ""
FULL_VERSION = '0.0.1dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION+GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
| import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
| Improve the version information string | ENH: Improve the version information string
| Python | bsd-3-clause | sahg/SAHGutils | import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = ""
except WindowsError:
GIT_REVISION = ""
FULL_VERSION = '0.0.1dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION+GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
ENH: Improve the version information string | import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
| <commit_before>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = ""
except WindowsError:
GIT_REVISION = ""
FULL_VERSION = '0.0.1dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION+GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
<commit_msg>ENH: Improve the version information string<commit_after> | import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
| import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = ""
except WindowsError:
GIT_REVISION = ""
FULL_VERSION = '0.0.1dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION+GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
ENH: Improve the version information stringimport os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
| <commit_before>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = ""
except WindowsError:
GIT_REVISION = ""
FULL_VERSION = '0.0.1dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION+GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
<commit_msg>ENH: Improve the version information string<commit_after>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
|
c31bf6447f7b373ecbcc79e161dc406907887344 | commandcenter/app_test.py | commandcenter/app_test.py | from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
| from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
def test_new_artists_page(self):
response = self.fetch('/new-artists/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
self.assertIn('See new artists</h1>', body)
self.assertIn(
'This command will show you what artists are (supposedly) not yet in the database.',
body)
| Add another test to check that the "See new artists" page is basically rendered correctly | Add another test to check that the "See new artists" page is basically rendered correctly
| Python | apache-2.0 | chirpradio/command-center,chirpradio/command-center | from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
Add another test to check that the "See new artists" page is basically rendered correctly | from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
def test_new_artists_page(self):
response = self.fetch('/new-artists/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
self.assertIn('See new artists</h1>', body)
self.assertIn(
'This command will show you what artists are (supposedly) not yet in the database.',
body)
| <commit_before>from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
<commit_msg>Add another test to check that the "See new artists" page is basically rendered correctly<commit_after> | from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
def test_new_artists_page(self):
response = self.fetch('/new-artists/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
self.assertIn('See new artists</h1>', body)
self.assertIn(
'This command will show you what artists are (supposedly) not yet in the database.',
body)
| from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
Add another test to check that the "See new artists" page is basically rendered correctlyfrom tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
def test_new_artists_page(self):
response = self.fetch('/new-artists/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
self.assertIn('See new artists</h1>', body)
self.assertIn(
'This command will show you what artists are (supposedly) not yet in the database.',
body)
| <commit_before>from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
<commit_msg>Add another test to check that the "See new artists" page is basically rendered correctly<commit_after>from tornado.testing import AsyncHTTPTestCase
from tornado.websocket import websocket_connect
from . import app
class TestMyApp(AsyncHTTPTestCase):
def get_app(self):
return app.get_app()
def test_homepage(self):
response = self.fetch('/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
# Check that title is there.
self.assertIn('<h1>CHIRP Command Center</h1>', body)
# Check that links are present.
self.assertRegexpMatches(
body,
r'<a href="/new-artists/" class="[^"]+">See new artists</a>')
def test_new_artists_page(self):
response = self.fetch('/new-artists/')
self.assertEqual(response.code, 200)
body = response.body.decode('utf-8')
self.assertIn('See new artists</h1>', body)
self.assertIn(
'This command will show you what artists are (supposedly) not yet in the database.',
body)
|
bb951b655ca49b341fec3f6cb813f2b07c118696 | app/utils.py | app/utils.py | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc
def get_redirect_target():
""" Returns the redirect target. """
for target in request.args.get("next"), request.referrer:
if not target:
continue
elif is_safe_url(target):
return target | Add utility function for getting the redirect target | Add utility function for getting the redirect target
| Python | mit | Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netlocAdd utility function for getting the redirect target | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc
def get_redirect_target():
""" Returns the redirect target. """
for target in request.args.get("next"), request.referrer:
if not target:
continue
elif is_safe_url(target):
return target | <commit_before>from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc<commit_msg>Add utility function for getting the redirect target<commit_after> | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc
def get_redirect_target():
""" Returns the redirect target. """
for target in request.args.get("next"), request.referrer:
if not target:
continue
elif is_safe_url(target):
return target | from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netlocAdd utility function for getting the redirect targetfrom urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc
def get_redirect_target():
""" Returns the redirect target. """
for target in request.args.get("next"), request.referrer:
if not target:
continue
elif is_safe_url(target):
return target | <commit_before>from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc<commit_msg>Add utility function for getting the redirect target<commit_after>from urllib.parse import urlparse, urljoin
from flask import request
def get_or_create(model, **kwargs):
""" Returns an instance of model and whether or not it already existed in a tuple. """
instance = model.query.filter_by(**kwargs).first()
if instance:
return instance, False
else:
instance = model(**kwargs)
return instance, True
def is_safe_url(target):
""" Checks if an URL is safe. """
ref_url = urlparse(request.host_url)
test_url = urlparse(urljoin(request.host_url, target))
return test_url.scheme in ("http", "https") and ref_url.netloc == test_url.netloc
def get_redirect_target():
""" Returns the redirect target. """
for target in request.args.get("next"), request.referrer:
if not target:
continue
elif is_safe_url(target):
return target |
02faa67481e0902501e9e269c17351d9b8f05afa | tests/python/pipeline_util/test-export_.py | tests/python/pipeline_util/test-export_.py | #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def main(testname):
if testname == 'import':
test_import()
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 4:
log("Error: Expected three arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
main(testname)
| #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from vistk.pipeline import pipeline
from vistk.pipeline import modules
from vistk.pipeline_util import bake
from vistk.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
_, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
def main(testname, path):
if testname == 'import':
test_import()
elif testname == 'simple_pipeline':
test_simple_pipeline(path)
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
log("Error: Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
main(testname, path)
| Add a test for exporting to dot from python | Add a test for exporting to dot from python
| Python | bsd-3-clause | mathstuf/sprokit,Kitware/sprokit,Kitware/sprokit,mathstuf/sprokit,linus-sherrill/sprokit,linus-sherrill/sprokit,Kitware/sprokit,Kitware/sprokit,linus-sherrill/sprokit,mathstuf/sprokit,mathstuf/sprokit,linus-sherrill/sprokit | #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def main(testname):
if testname == 'import':
test_import()
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 4:
log("Error: Expected three arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
main(testname)
Add a test for exporting to dot from python | #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from vistk.pipeline import pipeline
from vistk.pipeline import modules
from vistk.pipeline_util import bake
from vistk.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
_, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
def main(testname, path):
if testname == 'import':
test_import()
elif testname == 'simple_pipeline':
test_simple_pipeline(path)
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
log("Error: Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
main(testname, path)
| <commit_before>#!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def main(testname):
if testname == 'import':
test_import()
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 4:
log("Error: Expected three arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
main(testname)
<commit_msg>Add a test for exporting to dot from python<commit_after> | #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from vistk.pipeline import pipeline
from vistk.pipeline import modules
from vistk.pipeline_util import bake
from vistk.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
_, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
def main(testname, path):
if testname == 'import':
test_import()
elif testname == 'simple_pipeline':
test_simple_pipeline(path)
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
log("Error: Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
main(testname, path)
| #!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def main(testname):
if testname == 'import':
test_import()
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 4:
log("Error: Expected three arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
main(testname)
Add a test for exporting to dot from python#!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from vistk.pipeline import pipeline
from vistk.pipeline import modules
from vistk.pipeline_util import bake
from vistk.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
_, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
def main(testname, path):
if testname == 'import':
test_import()
elif testname == 'simple_pipeline':
test_simple_pipeline(path)
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
log("Error: Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
main(testname, path)
| <commit_before>#!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def main(testname):
if testname == 'import':
test_import()
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 4:
log("Error: Expected three arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
main(testname)
<commit_msg>Add a test for exporting to dot from python<commit_after>#!/usr/bin/env python
#ckwg +5
# Copyright 2011 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def log(msg):
import sys
sys.stderr.write("%s\n" % msg)
def test_import():
try:
import vistk.pipeline_util.export_
except:
log("Error: Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from vistk.pipeline import pipeline
from vistk.pipeline import modules
from vistk.pipeline_util import bake
from vistk.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
_, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
def main(testname, path):
if testname == 'import':
test_import()
elif testname == 'simple_pipeline':
test_simple_pipeline(path)
else:
log("Error: No such test '%s'" % testname)
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
log("Error: Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
main(testname, path)
|
499defc47f0647afda47be8a8a25d04095b07e1b | nn/slmc/accuracy.py | nn/slmc/accuracy.py | import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
| import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.to_float(correct_prediction))
| Use to_float instead of cast | Use to_float instead of cast
| Python | unlicense | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
Use to_float instead of cast | import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.to_float(correct_prediction))
| <commit_before>import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
<commit_msg>Use to_float instead of cast<commit_after> | import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.to_float(correct_prediction))
| import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
Use to_float instead of castimport tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.to_float(correct_prediction))
| <commit_before>import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
<commit_msg>Use to_float instead of cast<commit_after>import tensorflow as tf
from ..util import static_shape, static_rank
def accuracy(output_layer, true_label):
assert static_rank(output_layer) == 2
#assert static_shape(output_layer)[0] == (batch size)
#assert static_shape(output_layer)[1] == (number of classes)
assert static_rank(true_label) == 1
#assert static_shape(true_label)[0] == (batch size)
assert static_shape(output_layer)[0] == static_shape(true_label)[0]
correct_prediction = tf.equal(tf.argmax(output_layer, 1), true_label)
return tf.reduce_mean(tf.to_float(correct_prediction))
|
edd4cfd5cf4102ab77e889c680306f25280e6165 | lingcod/bookmarks/forms.py | lingcod/bookmarks/forms.py | from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
| from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
ip = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
| Hide IP from input form | Hide IP from input form
| Python | bsd-3-clause | Alwnikrotikz/marinemap,google-code-export/marinemap,google-code-export/marinemap,Alwnikrotikz/marinemap,Alwnikrotikz/marinemap,Alwnikrotikz/marinemap,google-code-export/marinemap,google-code-export/marinemap | from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
Hide IP from input form | from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
ip = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
| <commit_before>from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
<commit_msg>Hide IP from input form<commit_after> | from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
ip = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
| from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
Hide IP from input formfrom lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
ip = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
| <commit_before>from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
<commit_msg>Hide IP from input form<commit_after>from lingcod.features.forms import FeatureForm
from lingcod.bookmarks.models import Bookmark
from django import forms
class BookmarkForm(FeatureForm):
name = forms.CharField(label='Bookmark Name')
latitude = forms.FloatField(widget=forms.HiddenInput())
longitude = forms.FloatField(widget=forms.HiddenInput())
altitude = forms.FloatField(widget=forms.HiddenInput())
heading = forms.FloatField(widget=forms.HiddenInput())
tilt = forms.FloatField(widget=forms.HiddenInput())
roll = forms.FloatField(widget=forms.HiddenInput())
altitudeMode = forms.FloatField(widget=forms.HiddenInput())
publicstate = forms.CharField(widget=forms.HiddenInput())
ip = forms.CharField(widget=forms.HiddenInput())
class Meta(FeatureForm.Meta):
model = Bookmark
|
51346025b638159c69fe2c8da85170784d065d60 | test_passgen.py | test_passgen.py | #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
| #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
| Add unit test for valid blacklist | Add unit test for valid blacklist
| Python | mit | Videonauth/passgen | #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
Add unit test for valid blacklist | #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
| <commit_before>#!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
<commit_msg>Add unit test for valid blacklist<commit_after> | #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
| #!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
Add unit test for valid blacklist#!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
| <commit_before>#!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
<commit_msg>Add unit test for valid blacklist<commit_after>#!/usr/bin/env python3
import argparse
from passgen import make_parser, sanitize_input
import unittest
class PassGenTestCase(unittest.TestCase):
def setUp(self):
self.parse_args = make_parser().parse_args
def test_duplicate_flags(self):
for duplicate_flag in ['dd', 'll', 'uu', 'pp', 'ss']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', duplicate_flag]))
def test_no_valid_flags(self):
for invalid_flag in ['a', 'b', 'c']:
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-f', invalid_flag]))
def test_limit_lower_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '0']))
def test_limit_upper_bound(self):
with self.assertRaises(ValueError):
sanitize_input(self.parse_args(['-i', '9']))
def test_valid_flags(self):
for valid_flag in ['dl', 'du', 'dp', 'ds', 'dlu', 'dlup', 'dlups']:
dictionary = sanitize_input(self.parse_args(['-f', valid_flag]))
self.assertIsInstance(dictionary, argparse.Namespace)
def test_valid_blacklist(self):
dictionary = sanitize_input(self.parse_args(['-f', 'd', '-b', '012345678']))
self.assertIsInstance(dictionary, argparse.Namespace)
if __name__ == '__main__':
unittest.main(buffer=True)
|
6e8efdbb31c8713eeee0105ddafbd88d6286cfc9 | ganttcharts/cli/send_summary_emails.py | ganttcharts/cli/send_summary_emails.py | import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
email = emails.Summary(account, today)
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
| import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
try:
email = emails.Summary(account, today)
except RuntimeError: # no tasks
continue
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
| Add check for no tasks | Add check for no tasks
| Python | mit | thomasleese/gantt-charts,thomasleese/gantt-charts,thomasleese/gantt-charts,thomasleese/gantt-charts,thomasleese/gantt-charts | import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
email = emails.Summary(account, today)
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
Add check for no tasks | import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
try:
email = emails.Summary(account, today)
except RuntimeError: # no tasks
continue
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
| <commit_before>import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
email = emails.Summary(account, today)
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
<commit_msg>Add check for no tasks<commit_after> | import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
try:
email = emails.Summary(account, today)
except RuntimeError: # no tasks
continue
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
| import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
email = emails.Summary(account, today)
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
Add check for no tasksimport datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
try:
email = emails.Summary(account, today)
except RuntimeError: # no tasks
continue
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
| <commit_before>import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
email = emails.Summary(account, today)
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
<commit_msg>Add check for no tasks<commit_after>import datetime
import time
from .. import emails
from ..database import get_sql_connection
from ..models import Account, Session as SqlSession
__description__ = 'Send out summary emails.'
def send_out_emails():
session = SqlSession()
today = datetime.date.today()
accounts = session.query(Account) \
.filter(Account.receive_summary_email == True)
for account in accounts:
try:
email = emails.Summary(account, today)
except RuntimeError: # no tasks
continue
with emails.Mailer() as mailer:
mailer.send(email)
def command(args):
get_sql_connection()
if args.forever:
while True:
tomorrow = datetime.datetime.utcnow() + datetime.timedelta(days=1)
tomorrow = tomorrow.replace(hour=4, minute=0)
diff = tomorrow - datetime.datetime.utcnow()
time.sleep(diff.total_seconds())
send_out_emails()
else:
send_out_emails()
def add_subparser(subparsers):
parser = subparsers.add_parser('send-summary-emails', help=__description__)
parser.add_argument('--forever', action='store_true')
parser.set_defaults(func=command)
|
fbae1592e9a94a4b6faec0896abfc38acb06d5d2 | top40.py | top40.py | import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
response = requests.get(url)
print response.json()
| #/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
@click.command()
@click.option('--count',
default=10,
help='Number of songs to show. Maximum is 40')
def get_charts(count):
"""Prints the top COUNT songs in the UK Top 40 chart."""
response = requests.get(url).json()
data = response['entries'][:count]
for index, element in enumerate(data, start=1):
click.echo(
'{}. {} - {}'.format(
index,
element['title'],
element['artist'].encode('utf-8', 'replace')))
if __name__ == '__main__':
get_charts()
| Implement basic printing of the singles chart | Implement basic printing of the singles chart
| Python | mit | kevgathuku/top40,andela-kndungu/top40 | import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
response = requests.get(url)
print response.json()
Implement basic printing of the singles chart | #/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
@click.command()
@click.option('--count',
default=10,
help='Number of songs to show. Maximum is 40')
def get_charts(count):
"""Prints the top COUNT songs in the UK Top 40 chart."""
response = requests.get(url).json()
data = response['entries'][:count]
for index, element in enumerate(data, start=1):
click.echo(
'{}. {} - {}'.format(
index,
element['title'],
element['artist'].encode('utf-8', 'replace')))
if __name__ == '__main__':
get_charts()
| <commit_before>import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
response = requests.get(url)
print response.json()
<commit_msg>Implement basic printing of the singles chart<commit_after> | #/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
@click.command()
@click.option('--count',
default=10,
help='Number of songs to show. Maximum is 40')
def get_charts(count):
"""Prints the top COUNT songs in the UK Top 40 chart."""
response = requests.get(url).json()
data = response['entries'][:count]
for index, element in enumerate(data, start=1):
click.echo(
'{}. {} - {}'.format(
index,
element['title'],
element['artist'].encode('utf-8', 'replace')))
if __name__ == '__main__':
get_charts()
| import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
response = requests.get(url)
print response.json()
Implement basic printing of the singles chart#/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
@click.command()
@click.option('--count',
default=10,
help='Number of songs to show. Maximum is 40')
def get_charts(count):
"""Prints the top COUNT songs in the UK Top 40 chart."""
response = requests.get(url).json()
data = response['entries'][:count]
for index, element in enumerate(data, start=1):
click.echo(
'{}. {} - {}'.format(
index,
element['title'],
element['artist'].encode('utf-8', 'replace')))
if __name__ == '__main__':
get_charts()
| <commit_before>import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
response = requests.get(url)
print response.json()
<commit_msg>Implement basic printing of the singles chart<commit_after>#/usr/bin/env python
# -*- coding: utf-8 -*-
import click
import requests
url = 'http://ben-major.co.uk/labs/top40/api/singles/'
@click.command()
@click.option('--count',
default=10,
help='Number of songs to show. Maximum is 40')
def get_charts(count):
"""Prints the top COUNT songs in the UK Top 40 chart."""
response = requests.get(url).json()
data = response['entries'][:count]
for index, element in enumerate(data, start=1):
click.echo(
'{}. {} - {}'.format(
index,
element['title'],
element['artist'].encode('utf-8', 'replace')))
if __name__ == '__main__':
get_charts()
|
d60b16912cc3aa5c0a4f231b63b564683b2b8f64 | parameters/enums.py | parameters/enums.py | # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
| # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
| Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL | Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL
| Python | mit | magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3 | # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL | # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
| <commit_before># -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
<commit_msg>Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL<commit_after> | # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
| # -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL# -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
| <commit_before># -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_PROTOCOL',
default='https',
kind='str',
verbose_name=_('default protocol: https'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
<commit_msg>Rename parameter DEFAULT_PROTOCOL to DEFAULT_URL_PROTOCOL<commit_after># -*- coding: utf-8 -*-
# standard library
import collections
# django
from django.utils.translation import ugettext_lazy as _
ParameterDefinition = collections.namedtuple(
'Parameter',
[
'name',
'default',
'kind',
'verbose_name',
]
)
class ParameterDefinitionList(object):
definitions = [
ParameterDefinition(
name='DEFAULT_URL_PROTOCOL',
default='https',
kind='str',
verbose_name=_('Default url protocol'),
),
]
choices = tuple((x.name, x.verbose_name) for x in definitions)
|
fcc2a190a50327a2349dfbb8e93d3157a6c1f1e8 | src/sentry/utils/versioning.py | src/sentry/utils/versioning.py | from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that all members of mapping hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
| from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
| Fix strange wording in version check docstring. | Fix strange wording in version check docstring.
| Python | bsd-3-clause | zenefits/sentry,BuildingLink/sentry,ifduyue/sentry,beeftornado/sentry,beeftornado/sentry,jean/sentry,jean/sentry,ifduyue/sentry,JackDanger/sentry,nicholasserra/sentry,gencer/sentry,fotinakis/sentry,mvaled/sentry,mvaled/sentry,daevaorn/sentry,jean/sentry,ifduyue/sentry,JamesMura/sentry,alexm92/sentry,daevaorn/sentry,fotinakis/sentry,imankulov/sentry,beeftornado/sentry,fotinakis/sentry,nicholasserra/sentry,mitsuhiko/sentry,zenefits/sentry,JackDanger/sentry,mvaled/sentry,gencer/sentry,gencer/sentry,jean/sentry,JamesMura/sentry,JamesMura/sentry,JackDanger/sentry,ifduyue/sentry,looker/sentry,gencer/sentry,mitsuhiko/sentry,imankulov/sentry,daevaorn/sentry,BuildingLink/sentry,nicholasserra/sentry,mvaled/sentry,BayanGroup/sentry,imankulov/sentry,BuildingLink/sentry,looker/sentry,BuildingLink/sentry,zenefits/sentry,alexm92/sentry,jean/sentry,fotinakis/sentry,alexm92/sentry,BuildingLink/sentry,BayanGroup/sentry,looker/sentry,zenefits/sentry,mvaled/sentry,gencer/sentry,JamesMura/sentry,zenefits/sentry,JamesMura/sentry,BayanGroup/sentry,looker/sentry,mvaled/sentry,daevaorn/sentry,ifduyue/sentry,looker/sentry | from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that all members of mapping hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
Fix strange wording in version check docstring. | from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
| <commit_before>from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that all members of mapping hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
<commit_msg>Fix strange wording in version check docstring.<commit_after> | from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
| from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that all members of mapping hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
Fix strange wording in version check docstring.from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
| <commit_before>from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that all members of mapping hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
<commit_msg>Fix strange wording in version check docstring.<commit_after>from __future__ import absolute_import
import warnings
from collections import namedtuple
from sentry.exceptions import InvalidConfiguration
class Version(namedtuple('Version', 'major minor patch')):
def __str__(self):
return '.'.join(map(str, self))
def make_upgrade_message(service, modality, version, hosts):
return '{service} {modality} be upgraded to {version} on {hosts}.'.format(
hosts=', '.join('{0} (currently {1})'.format(*i) for i in hosts),
modality=modality,
service=service,
version=version,
)
def check_versions(service, versions, required, recommended=None):
"""
Check that hosts fulfill version requirements.
:param service: service label, such as ``Redis``
:param versions: mapping of host to ``Version``
:param required: lowest supported ``Version``. If any host does not fulfill
this requirement, an ``InvalidConfiguration`` exception is raised.
:param recommended: recommended version. If any host does not fulfill this
requirement, a ``PendingDeprecationWarning`` is raised.
"""
must_upgrade = filter(lambda (host, version): required > version, versions.items())
if must_upgrade:
raise InvalidConfiguration(make_upgrade_message(service, 'must', required, must_upgrade))
if recommended:
should_upgrade = filter(lambda (host, version): recommended > version, versions.items())
if should_upgrade:
warnings.warn(
make_upgrade_message(service, 'should', recommended, should_upgrade),
PendingDeprecationWarning,
)
|
3e9fe3ac1f261d66ee9842fa507752754ae3d5aa | src/server.py | src/server.py | import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
| import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class PullRequest(Object):
def __init__(self, data):
self.data = data
def execute(self):
if self.data['action'] == 'opened':
self.execute_opened()
def execute_opened():
// TODO check PR and add message that this is under voting
pass
print(self.data)
print(self.data.keys)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def handle_pull_request(self, data):
pull_request = PullRequest(data)
pull_request.execute()
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
if header == 'pull_request':
self.handle_pull_request(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
| Handle pull_request and create object | Handle pull_request and create object
| Python | agpl-3.0 | TooAngel/democratic-collaboration,TooAngel/democratic-collaboration,TooAngel/democratic-collaboration | import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
Handle pull_request and create object | import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class PullRequest(Object):
def __init__(self, data):
self.data = data
def execute(self):
if self.data['action'] == 'opened':
self.execute_opened()
def execute_opened():
// TODO check PR and add message that this is under voting
pass
print(self.data)
print(self.data.keys)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def handle_pull_request(self, data):
pull_request = PullRequest(data)
pull_request.execute()
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
if header == 'pull_request':
self.handle_pull_request(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
| <commit_before>import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
<commit_msg>Handle pull_request and create object<commit_after> | import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class PullRequest(Object):
def __init__(self, data):
self.data = data
def execute(self):
if self.data['action'] == 'opened':
self.execute_opened()
def execute_opened():
// TODO check PR and add message that this is under voting
pass
print(self.data)
print(self.data.keys)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def handle_pull_request(self, data):
pull_request = PullRequest(data)
pull_request.execute()
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
if header == 'pull_request':
self.handle_pull_request(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
| import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
Handle pull_request and create objectimport os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class PullRequest(Object):
def __init__(self, data):
self.data = data
def execute(self):
if self.data['action'] == 'opened':
self.execute_opened()
def execute_opened():
// TODO check PR and add message that this is under voting
pass
print(self.data)
print(self.data.keys)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def handle_pull_request(self, data):
pull_request = PullRequest(data)
pull_request.execute()
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
if header == 'pull_request':
self.handle_pull_request(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
| <commit_before>import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
<commit_msg>Handle pull_request and create object<commit_after>import os
from flask import Flask, request
from flask.ext import restful # @UnresolvedImport
app = Flask(
__name__,
template_folder='../templates',
static_folder='../static'
)
api = restful.Api(app)
class PullRequest(Object):
def __init__(self, data):
self.data = data
def execute(self):
if self.data['action'] == 'opened':
self.execute_opened()
def execute_opened():
// TODO check PR and add message that this is under voting
pass
print(self.data)
print(self.data.keys)
class Github(restful.Resource):
def handle_push(self, data):
print(data)
def handle_pull_request(self, data):
pull_request = PullRequest(data)
pull_request.execute()
def post(self):
data = request.json
header = request.headers['X-GitHub-Event']
if header == 'push':
self.handle_push(data)
return true
if header == 'pull_request':
self.handle_pull_request(data)
return true
print(header)
print(data)
api.add_resource(Github, '/github/')
if __name__ == '__main__':
app.debug = os.getenv('DEBUG', 'false').lower() == 'true'
app.run(host='0.0.0.0', port=int(os.getenv("PORT", 5001)))
|
87a07437c2481f92286f01f988405b4f3cfc5d37 | apps/accounts/models.py | apps/accounts/models.py | from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.CharField(max_length = 5)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
| from apps.teilar.models import Departments
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.ForeignKey(Departments)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
| Make school field foreign key of Departments table | Make school field foreign key of Departments table
| Python | agpl-3.0 | LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr | from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.CharField(max_length = 5)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
Make school field foreign key of Departments table | from apps.teilar.models import Departments
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.ForeignKey(Departments)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
| <commit_before>from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.CharField(max_length = 5)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
<commit_msg>Make school field foreign key of Departments table<commit_after> | from apps.teilar.models import Departments
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.ForeignKey(Departments)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
| from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.CharField(max_length = 5)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
Make school field foreign key of Departments tablefrom apps.teilar.models import Departments
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.ForeignKey(Departments)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
| <commit_before>from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.CharField(max_length = 5)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
<commit_msg>Make school field foreign key of Departments table<commit_after>from apps.teilar.models import Departments
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.ForeignKey(User, unique = True)
dionysos_username = models.CharField(max_length = 15, unique = True)
dionysos_password = models.CharField(max_length = 30)
eclass_username = models.CharField(max_length = 30, null = True, blank = True)
eclass_password = models.CharField(max_length = 30, null = True, blank = True)
eclass_lessons = models.TextField(null = True, blank = True)
introduction_year = models.CharField(max_length = 5)
registration_number = models.CharField(max_length = 8)
school = models.ForeignKey(Departments)
semester = models.CharField(max_length = 2)
webmail_username = models.CharField(max_length = 30, null = True, blank = True)
webmail_password = models.CharField(max_length = 30, null = True, blank = True)
teacher_announcements = models.TextField(null = True, blank = True)
other_announcements = models.TextField(null = True, blank = True)
declaration = models.TextField(null = True, blank = True)
grades = models.TextField(null = True, blank = True)
def __unicode__(self):
return self.user.username
|
38bdff4d1d3bbf2416c6fe75036bf23d270f15a2 | plinth/__init__.py | plinth/__init__.py | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth package init file
"""
__version__ = '0.3.2'
| Make plinth directory a package, add version | Make plinth directory a package, add version
| Python | agpl-3.0 | vignanl/Plinth,harry-7/Plinth,vignanl/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,jvalleroy/plinth-debian,jvalleroy/plinth-debian,jvalleroy/plinth-debian,vignanl/Plinth,kkampardi/Plinth,kkampardi/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,vignanl/Plinth,kkampardi/Plinth,harry-7/Plinth,jvalleroy/plinth-debian,vignanl/Plinth,jvalleroy/plinth-debian,freedomboxtwh/Plinth,harry-7/Plinth | Make plinth directory a package, add version | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth package init file
"""
__version__ = '0.3.2'
| <commit_before><commit_msg>Make plinth directory a package, add version<commit_after> | #
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth package init file
"""
__version__ = '0.3.2'
| Make plinth directory a package, add version#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth package init file
"""
__version__ = '0.3.2'
| <commit_before><commit_msg>Make plinth directory a package, add version<commit_after>#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth package init file
"""
__version__ = '0.3.2'
| |
40bb8f55011f3426058313075cbb5f04cd9bfb97 | main/translations.py | main/translations.py | from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy("Person name", "Name")
| from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy('Person name', 'Name')
| Fix formatting: use single quotes | Fix formatting: use single quotes
| Python | agpl-3.0 | Davidyuk/witcoin,Davidyuk/witcoin | from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy("Person name", "Name")
Fix formatting: use single quotes | from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy('Person name', 'Name')
| <commit_before>from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy("Person name", "Name")
<commit_msg>Fix formatting: use single quotes<commit_after> | from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy('Person name', 'Name')
| from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy("Person name", "Name")
Fix formatting: use single quotesfrom django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy('Person name', 'Name')
| <commit_before>from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy("Person name", "Name")
<commit_msg>Fix formatting: use single quotes<commit_after>from django.utils.translation import ungettext_lazy, ugettext_lazy, pgettext_lazy
"""
Removing this code causes makemessages to comment out those PO entries, so don't do that
unless you find a better way to do this
http://stackoverflow.com/questions/7625991/how-to-properly-add-entries-for-computed-values-to-the-django-internationalizati
http://stackoverflow.com/questions/7878028/override-default-django-translations
"""
ungettext_lazy('%d week', '%d weeks')
ungettext_lazy('%d minute', '%d minutes')
ugettext_lazy('Comment')
ugettext_lazy('Threaded comment')
pgettext_lazy('Person name', 'Name')
|
85db39e36c99e800e1008605213d1c25108b035d | angr/paths.py | angr/paths.py | import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
| import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
| Allow specifying jumpkind with creating a Path via PathGenerator.blank_path() | Allow specifying jumpkind with creating a Path via PathGenerator.blank_path()
| Python | bsd-2-clause | angr/angr,GuardianRG/angr,iamahuman/angr,cureHsu/angr,tyb0807/angr,mingderwang/angr,fjferrer/angr,angr/angr,zhuyue1314/angr,axt/angr,cureHsu/angr,chubbymaggie/angr,schieb/angr,lowks/angr,fjferrer/angr,zhuyue1314/angr,schieb/angr,chubbymaggie/angr,GuardianRG/angr,axt/angr,mingderwang/angr,avain/angr,schieb/angr,angr/angr,lowks/angr,haylesr/angr,iamahuman/angr,axt/angr,iamahuman/angr,f-prettyland/angr,xurantju/angr,chubbymaggie/angr,tyb0807/angr,xurantju/angr,avain/angr,f-prettyland/angr,haylesr/angr,terry2012/angr,tyb0807/angr,terry2012/angr,f-prettyland/angr | import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
Allow specifying jumpkind with creating a Path via PathGenerator.blank_path() | import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
| <commit_before>import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
<commit_msg>Allow specifying jumpkind with creating a Path via PathGenerator.blank_path()<commit_after> | import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
| import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
Allow specifying jumpkind with creating a Path via PathGenerator.blank_path()import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
| <commit_before>import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
<commit_msg>Allow specifying jumpkind with creating a Path via PathGenerator.blank_path()<commit_after>import logging
l = logging.getLogger('angr.states')
class PathGenerator(object):
def __init__(self, project):
self._project = project
def blank_path(self, state=None, jumpkind='Ijk_Boring', *args, **kwargs):
'''
blank_point - Returns a start path, representing a clean start of symbolic execution.
'''
s = self._project.state_generator.blank_state(*args, **kwargs) if state is None else state
return Path(self._project, s, jumpkind=jumpkind)
def entry_point(self, state=None, *args, **kwargs):
'''
entry_point - Returns a path reflecting the processor when execution
reaches the binary's entry point.
'''
s = self._project.state_generator.entry_point(*args, **kwargs) if state is None else state
return Path(self._project, s)
from .path import Path
|
6ae83f01eacceb140435e72a216fa88bd97f2b0c | pyswarms/utils/console_utils.py | pyswarms/utils/console_utils.py | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
def cli_print(message, verbosity, threshold):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
"""
if verbosity >= threshold:
print(message)
else:
pass
def end_report(cost, pos, verbosity):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
print(template) | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
# Import from __future__
from __future__ import with_statement
from __future__ import absolute_import
from __future__ import print_function
# Import modules
import logging
def cli_print(message, verbosity, threshold, logger):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
logger : logging.getLogger
logger instance
"""
if verbosity >= threshold:
logger.info(message)
else:
pass
def end_report(cost, pos, verbosity, logger):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
logger : logging.getLogger
logger instance
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
logger.info(template) | Add support for logging module | Add support for logging module
This package now prints using the logging module. It can still print
onto the console, but an additional tag like INFO, DEBUG, etc. are now
being used.
Author: ljvmiranda921
| Python | mit | ljvmiranda921/pyswarms,ljvmiranda921/pyswarms | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
def cli_print(message, verbosity, threshold):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
"""
if verbosity >= threshold:
print(message)
else:
pass
def end_report(cost, pos, verbosity):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
print(template)Add support for logging module
This package now prints using the logging module. It can still print
onto the console, but an additional tag like INFO, DEBUG, etc. are now
being used.
Author: ljvmiranda921 | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
# Import from __future__
from __future__ import with_statement
from __future__ import absolute_import
from __future__ import print_function
# Import modules
import logging
def cli_print(message, verbosity, threshold, logger):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
logger : logging.getLogger
logger instance
"""
if verbosity >= threshold:
logger.info(message)
else:
pass
def end_report(cost, pos, verbosity, logger):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
logger : logging.getLogger
logger instance
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
logger.info(template) | <commit_before># -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
def cli_print(message, verbosity, threshold):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
"""
if verbosity >= threshold:
print(message)
else:
pass
def end_report(cost, pos, verbosity):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
print(template)<commit_msg>Add support for logging module
This package now prints using the logging module. It can still print
onto the console, but an additional tag like INFO, DEBUG, etc. are now
being used.
Author: ljvmiranda921<commit_after> | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
# Import from __future__
from __future__ import with_statement
from __future__ import absolute_import
from __future__ import print_function
# Import modules
import logging
def cli_print(message, verbosity, threshold, logger):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
logger : logging.getLogger
logger instance
"""
if verbosity >= threshold:
logger.info(message)
else:
pass
def end_report(cost, pos, verbosity, logger):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
logger : logging.getLogger
logger instance
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
logger.info(template) | # -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
def cli_print(message, verbosity, threshold):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
"""
if verbosity >= threshold:
print(message)
else:
pass
def end_report(cost, pos, verbosity):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
print(template)Add support for logging module
This package now prints using the logging module. It can still print
onto the console, but an additional tag like INFO, DEBUG, etc. are now
being used.
Author: ljvmiranda921# -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
# Import from __future__
from __future__ import with_statement
from __future__ import absolute_import
from __future__ import print_function
# Import modules
import logging
def cli_print(message, verbosity, threshold, logger):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
logger : logging.getLogger
logger instance
"""
if verbosity >= threshold:
logger.info(message)
else:
pass
def end_report(cost, pos, verbosity, logger):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
logger : logging.getLogger
logger instance
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
logger.info(template) | <commit_before># -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
def cli_print(message, verbosity, threshold):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
"""
if verbosity >= threshold:
print(message)
else:
pass
def end_report(cost, pos, verbosity):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
print(template)<commit_msg>Add support for logging module
This package now prints using the logging module. It can still print
onto the console, but an additional tag like INFO, DEBUG, etc. are now
being used.
Author: ljvmiranda921<commit_after># -*- coding: utf-8 -*-
""" console_utils.py: various tools for printing into console """
# Import from __future__
from __future__ import with_statement
from __future__ import absolute_import
from __future__ import print_function
# Import modules
import logging
def cli_print(message, verbosity, threshold, logger):
"""Helper function to print console output
Parameters
----------
message : str
the message to be printed into the console
verbosity : int
verbosity setting of the user
threshold : int
threshold for printing
logger : logging.getLogger
logger instance
"""
if verbosity >= threshold:
logger.info(message)
else:
pass
def end_report(cost, pos, verbosity, logger):
"""Helper function to print a simple report at the end of the
run. This always has a threshold of 1.
Parameters
----------
cost : float
final cost from the optimization procedure.
pos : numpy.ndarray or list
best position found
verbosity : int
verbosity setting of the user.
logger : logging.getLogger
logger instance
"""
# Cuts the length of the best position if it's too long
if len(list(pos)) > 3:
out = ('[ ' + 3 * '{:3f} ' + '...]').format(*list(pos))
else:
out = list(pos)
template = ('================================\n'
'Optimization finished!\n'
'Final cost: {:06.4f}\n'
'Best value: {}\n').format(cost, out)
if verbosity >= 1:
logger.info(template) |
5f30d91d35d090e28925613365d5d1f31f0259d2 | daapserver/bonjour.py | daapserver/bonjour.py | import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": server.password is not None,
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + ".daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
| import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": int(bool(server.password)),
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + "._daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
| Fix for broken zeroconf publishing. | Fix for broken zeroconf publishing.
| Python | mit | ties/flask-daapserver,basilfx/flask-daapserver | import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": server.password is not None,
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + ".daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
Fix for broken zeroconf publishing. | import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": int(bool(server.password)),
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + "._daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
| <commit_before>import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": server.password is not None,
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + ".daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
<commit_msg>Fix for broken zeroconf publishing.<commit_after> | import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": int(bool(server.password)),
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + "._daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
| import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": server.password is not None,
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + ".daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
Fix for broken zeroconf publishing.import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": int(bool(server.password)),
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + "._daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
| <commit_before>import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": server.password is not None,
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + ".daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
<commit_msg>Fix for broken zeroconf publishing.<commit_after>import zeroconf
import socket
class Bonjour(object):
"""
"""
def __init__(self):
"""
"""
self.zeroconf = zeroconf.Zeroconf()
self.servers = {}
def publish(self, server):
"""
"""
if server in self.servers:
self.unpublish(server)
ip = "127.0.0.1" if server.ip == "0.0.0.0" else server.ip
description = {
"txtvers": 1,
"Password": int(bool(server.password)),
"Machine Name": server.server_name
}
self.servers[server] = zeroconf.ServiceInfo(
"_daap._tcp.local.", server.server_name + "._daap._tcp.local.",
socket.inet_aton(ip), server.port, 0, 0,
description)
self.zeroconf.register_service(self.servers[server])
def unpublish(self, server):
"""
"""
if server not in self.servers:
return
self.zeroconf.unregister_service(self.servers[server])
del self.servers[server]
def close(self):
"""
"""
self.zeroconf.close()
|
77cdf4de05b3edfe3231ffd831af38b290b178a1 | signals.py | signals.py | from django.core.signals import Signal
process_completed = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
| from django.core.signals import Signal
process_finished = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
| Add a little doc and callbacks | Add a little doc and callbacks
| Python | bsd-3-clause | hydroshare/django_docker_processes,JeffHeard/django_docker_processes | from django.core.signals import Signal
process_completed = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
Add a little doc and callbacks | from django.core.signals import Signal
process_finished = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
| <commit_before>from django.core.signals import Signal
process_completed = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
<commit_msg>Add a little doc and callbacks<commit_after> | from django.core.signals import Signal
process_finished = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
| from django.core.signals import Signal
process_completed = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
Add a little doc and callbacksfrom django.core.signals import Signal
process_finished = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
| <commit_before>from django.core.signals import Signal
process_completed = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
<commit_msg>Add a little doc and callbacks<commit_after>from django.core.signals import Signal
process_finished = Signal(providing_args=['result_text', 'result_data', 'files', 'profile','logs'])
process_aborted = Signal(providing_args=['error_text','result_data','profile','logs'])
|
5256408a2a5d7a4c5562962a590487b39da3dcff | core/helpers.py | core/helpers.py | from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('lang')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
| from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('language')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
| Use 'language' for query string instead of 'lang' | Use 'language' for query string instead of 'lang'
| Python | mit | uktrade/directory-ui-supplier,uktrade/directory-ui-supplier,uktrade/directory-ui-supplier | from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('lang')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
Use 'language' for query string instead of 'lang' | from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('language')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
| <commit_before>from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('lang')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
<commit_msg>Use 'language' for query string instead of 'lang'<commit_after> | from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('language')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
| from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('lang')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
Use 'language' for query string instead of 'lang'from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('language')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
| <commit_before>from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('lang')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
<commit_msg>Use 'language' for query string instead of 'lang'<commit_after>from django.shortcuts import Http404
from django.utils import translation
def handle_cms_response(response):
if response.status_code == 404:
raise Http404()
response.raise_for_status()
return response.json()
def get_language_from_querystring(request):
language_code = request.GET.get('language')
language_codes = translation.trans_real.get_languages()
if language_code and language_code in language_codes:
return language_code
|
e866e196f72f9965dfbca1f7628c8b90e5c627dc | CI/syntaxCheck.py | CI/syntaxCheck.py | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| Fix the location path of OpenIPSL | Fix the location path of OpenIPSL
| Python | bsd-3-clause | OpenIPSL/OpenIPSL,SmarTS-Lab/OpenIPSL,tinrabuzin/OpenIPSL,SmarTS-Lab/OpenIPSL | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix the location path of OpenIPSL | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| <commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix the location path of OpenIPSL<commit_after> | import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
Fix the location path of OpenIPSLimport sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
| <commit_before>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
<commit_msg>Fix the location path of OpenIPSL<commit_after>import sys
from CITests import CITests
# Libs in Application Examples
appExamples = {
#"KundurSMIB":"/ApplicationExamples/KundurSMIB/package.mo",
#"TwoAreas":"/ApplicationExamples/TwoAreas/package.mo",
#"SevenBus":"/ApplicationExamples/SevenBus/package.mo",
#"IEEE9":"/ApplicationExamples/IEEE9/package.mo",
#"IEEE14":"/ApplicationExamples/IEEE14/package.mo",
#"AKD":"/ApplicationExamples/AKD/package.mo",
#"N44":"/ApplicationExamples/N44/package.mo",
#"OpenCPSD5d3B":"/ApplicationExamples/OpenCPSD5d3B/package.mo",
#"RaPIdExperiments":"/ApplicationExamples/RaPIdExperiments/package.mo"
}
# Instance of CITests
ci = CITests("/OpenIPSL")
# Run Check on OpenIPSL
passLib = ci.runSyntaxCheck("OpenIPSL","/OpenIPSL/OpenIPSL/package.mo")
if not passLib:
# Error in OpenIPSL
sys.exit(1)
else:
# Run Check on App Examples
passAppEx = 1
for package in appExamples.keys():
passAppEx = passAppEx * ci.runSyntaxCheck(package,appExamples[package])
# The tests are failing if the number of failed check > 0
if passAppEx:
# Everything is fine
sys.exit(0)
else:
# Exit with error
sys.exit(1)
|
eed21e06dbb61899e9c14352750258aa81cc5d3c | doc/en/example/nonpython/conftest.py | doc/en/example/nonpython/conftest.py | # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in raw.items():
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in self.spec.items():
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
| # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in sorted(raw.items()):
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in sorted(self.spec.items()):
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
| Sort yml items to get same results for regendoc runs | Sort yml items to get same results for regendoc runs
| Python | mit | The-Compiler/pytest,pfctdayelise/pytest,malinoff/pytest,tomviner/pytest,hackebrot/pytest,rmfitzpatrick/pytest,nicoddemus/pytest,tomviner/pytest,RonnyPfannschmidt/pytest,skylarjhdownes/pytest,etataurov/pytest,hpk42/pytest,alfredodeza/pytest,nicoddemus/pytest,markshao/pytest,ddboline/pytest,hpk42/pytest,jaraco/pytest,The-Compiler/pytest,txomon/pytest,eli-b/pytest,vmalloc/dessert,davidszotten/pytest,Akasurde/pytest,MichaelAquilina/pytest,tareqalayan/pytest,tgoodlet/pytest,flub/pytest,pytest-dev/pytest | # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in raw.items():
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in self.spec.items():
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
Sort yml items to get same results for regendoc runs | # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in sorted(raw.items()):
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in sorted(self.spec.items()):
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
| <commit_before># content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in raw.items():
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in self.spec.items():
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
<commit_msg>Sort yml items to get same results for regendoc runs<commit_after> | # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in sorted(raw.items()):
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in sorted(self.spec.items()):
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
| # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in raw.items():
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in self.spec.items():
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
Sort yml items to get same results for regendoc runs# content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in sorted(raw.items()):
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in sorted(self.spec.items()):
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
| <commit_before># content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in raw.items():
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in self.spec.items():
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
<commit_msg>Sort yml items to get same results for regendoc runs<commit_after># content of conftest.py
import pytest
def pytest_collect_file(parent, path):
if path.ext == ".yml" and path.basename.startswith("test"):
return YamlFile(path, parent)
class YamlFile(pytest.File):
def collect(self):
import yaml # we need a yaml parser, e.g. PyYAML
raw = yaml.safe_load(self.fspath.open())
for name, spec in sorted(raw.items()):
yield YamlItem(name, self, spec)
class YamlItem(pytest.Item):
def __init__(self, name, parent, spec):
super(YamlItem, self).__init__(name, parent)
self.spec = spec
def runtest(self):
for name, value in sorted(self.spec.items()):
# some custom test execution (dumb example follows)
if name != value:
raise YamlException(self, name, value)
def repr_failure(self, excinfo):
""" called when self.runtest() raises an exception. """
if isinstance(excinfo.value, YamlException):
return "\n".join([
"usecase execution failed",
" spec failed: %r: %r" % excinfo.value.args[1:3],
" no further details known at this point."
])
def reportinfo(self):
return self.fspath, 0, "usecase: %s" % self.name
class YamlException(Exception):
""" custom exception for error reporting. """
|
cf09237c9d889b83f76e22835cb62da3711b6342 | netmiko/__init__.py | netmiko/__init__.py | from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.2"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
| from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.3"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
| Fix markdown issue on pypi | Fix markdown issue on pypi
| Python | mit | ktbyers/netmiko,ktbyers/netmiko | from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.2"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
Fix markdown issue on pypi | from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.3"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
| <commit_before>from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.2"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
<commit_msg>Fix markdown issue on pypi<commit_after> | from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.3"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
| from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.2"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
Fix markdown issue on pypifrom __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.3"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
| <commit_before>from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.2"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
<commit_msg>Fix markdown issue on pypi<commit_after>from __future__ import unicode_literals
import logging
# Logging configuration
log = logging.getLogger(__name__) # noqa
log.addHandler(logging.NullHandler()) # noqa
from netmiko.ssh_dispatcher import ConnectHandler
from netmiko.ssh_dispatcher import ssh_dispatcher
from netmiko.ssh_dispatcher import redispatch
from netmiko.ssh_dispatcher import platforms
from netmiko.ssh_dispatcher import FileTransfer
from netmiko.scp_handler import SCPConn
from netmiko.cisco.cisco_ios import InLineTransfer
from netmiko.ssh_exception import NetMikoTimeoutException
from netmiko.ssh_exception import NetMikoAuthenticationException
from netmiko.ssh_autodetect import SSHDetect
from netmiko.base_connection import BaseConnection
from netmiko.scp_functions import file_transfer
# Alternate naming
NetmikoTimeoutError = NetMikoTimeoutException
NetmikoAuthError = NetMikoAuthenticationException
Netmiko = ConnectHandler
__version__ = "2.3.3"
__all__ = (
"ConnectHandler",
"ssh_dispatcher",
"platforms",
"SCPConn",
"FileTransfer",
"NetMikoTimeoutException",
"NetMikoAuthenticationException",
"NetmikoTimeoutError",
"NetmikoAuthError",
"InLineTransfer",
"redispatch",
"SSHDetect",
"BaseConnection",
"Netmiko",
"file_transfer",
)
# Cisco cntl-shift-six sequence
CNTL_SHIFT_6 = chr(30)
|
a3fce3124168cde5dec925c3346bab59f4e6d59c | blog/forms.py | blog/forms.py | from .models import BlogPost
from django import forms
class BlogPostForm(forms.ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
| from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
| Add the form for Comment | Add the form for Comment
| Python | mit | andreagrandi/bloggato,andreagrandi/bloggato | from .models import BlogPost
from django import forms
class BlogPostForm(forms.ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
Add the form for Comment | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
| <commit_before>from .models import BlogPost
from django import forms
class BlogPostForm(forms.ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
<commit_msg>Add the form for Comment<commit_after> | from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
| from .models import BlogPost
from django import forms
class BlogPostForm(forms.ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
Add the form for Commentfrom .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
| <commit_before>from .models import BlogPost
from django import forms
class BlogPostForm(forms.ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
<commit_msg>Add the form for Comment<commit_after>from .models import BlogPost, Comment
from django.forms import ModelForm
class BlogPostForm(ModelForm):
class Meta:
model = BlogPost
exclude = ('user',)
class CommentForm(ModelForm):
class Meta:
model = Comment
exclude = ('post',)
|
296005cae2af44e7e14a7e7ee9a99a2deab8c924 | pyvarnish/remote.py | pyvarnish/remote.py | # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close() | # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = {
'hostname': server,
'port': 22,
# If these are None, Paramiko will figure out the correct values.
'user': None,
'identityfile': None,
}
self.conf.update(self.config())
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
conf = sshconfig.lookup(self.server)
if 'port' in conf:
conf['port'] = int(conf['port'])
return conf
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = self.conf['port'],
username = self.conf['user'],
key_filename = self.conf['identityfile'],)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
| Make the SSH configuration more resilient. | Make the SSH configuration more resilient.
If you don't have certain values specified in your ~/.ssh/config, use
the defaults instead of erroring out.
| Python | bsd-3-clause | redsnapper8t8/pyvarnish | # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()Make the SSH configuration more resilient.
If you don't have certain values specified in your ~/.ssh/config, use
the defaults instead of erroring out. | # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = {
'hostname': server,
'port': 22,
# If these are None, Paramiko will figure out the correct values.
'user': None,
'identityfile': None,
}
self.conf.update(self.config())
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
conf = sshconfig.lookup(self.server)
if 'port' in conf:
conf['port'] = int(conf['port'])
return conf
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = self.conf['port'],
username = self.conf['user'],
key_filename = self.conf['identityfile'],)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
| <commit_before># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()<commit_msg>Make the SSH configuration more resilient.
If you don't have certain values specified in your ~/.ssh/config, use
the defaults instead of erroring out.<commit_after> | # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = {
'hostname': server,
'port': 22,
# If these are None, Paramiko will figure out the correct values.
'user': None,
'identityfile': None,
}
self.conf.update(self.config())
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
conf = sshconfig.lookup(self.server)
if 'port' in conf:
conf['port'] = int(conf['port'])
return conf
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = self.conf['port'],
username = self.conf['user'],
key_filename = self.conf['identityfile'],)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
| # -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()Make the SSH configuration more resilient.
If you don't have certain values specified in your ~/.ssh/config, use
the defaults instead of erroring out.# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = {
'hostname': server,
'port': 22,
# If these are None, Paramiko will figure out the correct values.
'user': None,
'identityfile': None,
}
self.conf.update(self.config())
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
conf = sshconfig.lookup(self.server)
if 'port' in conf:
conf['port'] = int(conf['port'])
return conf
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = self.conf['port'],
username = self.conf['user'],
key_filename = self.conf['identityfile'],)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
| <commit_before># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()<commit_msg>Make the SSH configuration more resilient.
If you don't have certain values specified in your ~/.ssh/config, use
the defaults instead of erroring out.<commit_after># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = {
'hostname': server,
'port': 22,
# If these are None, Paramiko will figure out the correct values.
'user': None,
'identityfile': None,
}
self.conf.update(self.config())
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
conf = sshconfig.lookup(self.server)
if 'port' in conf:
conf['port'] = int(conf['port'])
return conf
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = self.conf['port'],
username = self.conf['user'],
key_filename = self.conf['identityfile'],)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
61605fa39920eca5bd47f7dc1b54eab67dd7d015 | quantum/__init__.py | quantum/__init__.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| Make the quantum top-level a namespace package. | Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d
| Python | apache-2.0 | gkotton/vmware-nsx,gkotton/vmware-nsx | Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before><commit_msg>Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d<commit_after> | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| <commit_before><commit_msg>Make the quantum top-level a namespace package.
Change-Id: I8fa596dedcc72fcec73972f6bf158e53c17b7e6d<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| |
3a8ea034e43985d4358b9f3e54c9bfc59ee6e99b | djangocms_spa_vue_js/templatetags/router_tags.py | djangocms_spa_vue_js/templatetags/router_tags.py | import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
| import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
return mark_safe(router_json)
| Remove single quote escaping for router js | Remove single quote escaping for router js
This used to be a html field which crippled the json when a single quote
was present. In the current usage it's required to print this json in a
script tag so this quote issue doesn't exist anymore
| Python | mit | dreipol/djangocms-spa-vue-js | import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
Remove single quote escaping for router js
This used to be a html field which crippled the json when a single quote
was present. In the current usage it's required to print this json in a
script tag so this quote issue doesn't exist anymore | import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
return mark_safe(router_json)
| <commit_before>import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
<commit_msg>Remove single quote escaping for router js
This used to be a html field which crippled the json when a single quote
was present. In the current usage it's required to print this json in a
script tag so this quote issue doesn't exist anymore<commit_after> | import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
return mark_safe(router_json)
| import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
Remove single quote escaping for router js
This used to be a html field which crippled the json when a single quote
was present. In the current usage it's required to print this json in a
script tag so this quote issue doesn't exist anymoreimport json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
return mark_safe(router_json)
| <commit_before>import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
escaped_router_json = router_json.replace("'", "'") # Escape apostrophes to prevent JS errors.
return mark_safe(escaped_router_json)
<commit_msg>Remove single quote escaping for router js
This used to be a html field which crippled the json when a single quote
was present. In the current usage it's required to print this json in a
script tag so this quote issue doesn't exist anymore<commit_after>import json
from django import template
from django.utils.safestring import mark_safe
from ..menu_helpers import get_vue_js_router
register = template.Library()
@register.simple_tag(takes_context=True)
def vue_js_router(context):
if 'vue_js_router' in context:
router = context['vue_js_router']
else:
router = get_vue_js_router(context=context)
router_json = json.dumps(router)
return mark_safe(router_json)
|
8514d379ac3a9d75722b3ccccd0a9da40d2c5819 | __openerp__.py | __openerp__.py | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| Add EbicsPy Launchpad repository url | Add EbicsPy Launchpad repository url
| Python | agpl-3.0 | yuntux/l10n_fr_ebics | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
Add EbicsPy Launchpad repository url | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| <commit_before># -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
<commit_msg>Add EbicsPy Launchpad repository url<commit_after> | # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| # -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
Add EbicsPy Launchpad repository url# -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
| <commit_before># -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
<commit_msg>Add EbicsPy Launchpad repository url<commit_after># -*- coding: utf-8 -*-
{
'name': "l10n_fr_ebics",
'summary': """Implementation of the EBICS banking protocol""",
'description': """
This module provides an interface to echanges files with banks. It's curently a beta version.
This module isbased on the library ebicsPy. It maps Odoo with the ebicspy API.
https://code.launchpad.net/~aurelien-dumaine/+junk/ebicspy
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
""",
'author': "Aurélien DUMAINE - aurelien.dumaine@free.fr",
'website': "http://www.dumaine.me",
# Categories can be used to filter modules in modules listing
# Check https://github.com/odoo/odoo/blob/master/openerp/addons/base/module/module_data.xml
# for the full list
'category': 'Uncategorized',
'version': '0.1',
# any module necessary for this one to work correctly
'depends': ['base'],
# always loaded
'data': [
# 'security/ir.model.access.csv',
'templates.xml',
'views/ebics.xml',
],
# only loaded in demonstration mode
'demo': [
'demo.xml',
],
}
|
926aae6d674f03803158978e211fe0e9e3c51722 | scripts/util/iff.py | scripts/util/iff.py | #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.error('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.info('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
| #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.info('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.debug('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
| Correct logging level for messages. | Correct logging level for messages.
| Python | artistic-2.0 | cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene,cahirwpz/demoscene | #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.error('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.info('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
Correct logging level for messages. | #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.info('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.debug('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
| <commit_before>#!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.error('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.info('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
<commit_msg>Correct logging level for messages.<commit_after> | #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.info('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.debug('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
| #!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.error('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.info('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
Correct logging level for messages.#!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.info('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.debug('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
| <commit_before>#!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.error('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.info('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
<commit_msg>Correct logging level for messages.<commit_after>#!/usr/bin/env python
from chunk import Chunk
import logging
import struct
class Parser(object):
ChunkAliasMap = {}
def __init__(self, kind):
self._kind = kind
self._chunks = []
def loadFile(self, filename):
with open(filename) as iff:
chunk = Chunk(iff)
logging.info('Reading file "%s"' % filename)
if chunk.getname() == 'FORM' and chunk.read(4) == self._kind:
iff.seek(12)
while True:
try:
chunk = Chunk(iff)
except EOFError:
break
name = chunk.getname()
size = chunk.getsize()
data = chunk.read()
logging.debug('Encountered %s chunk of size %d' % (name, size))
self._chunks.append(self._parseChunk(name, data))
else:
logging.error('File %s is not of IFF/%s type.' % (filename, self._kind))
return False
return True
def _parseChunk(self, name, data):
orig_name = name
for alias, names in self.ChunkAliasMap.items():
if name in names:
name = alias
handler = getattr(self, 'handle%s' % name, None)
if handler:
data = handler(data)
else:
logging.warning('No handler for %s chunk.' % orig_name)
return (orig_name, data)
|
3e91ff11f93e491963f6e38965672a9694ea0786 | optimize/__init__.py | optimize/__init__.py | from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
| from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
try:
from .ipopt_wrapper import minimize_ipopt
except:
import logging
logging.error("Could not import ipopt wrapper. Maybe ipopt is not installed?")
| Make sure package works even if ipopt is not installed | Make sure package works even if ipopt is not installed
Signed-off-by: Matthias Kümmerer <e388d34fd3c0456122779e95f262c0d70198a168@matthias-k.org>
| Python | mit | matthias-k/optpy | from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
Make sure package works even if ipopt is not installed
Signed-off-by: Matthias Kümmerer <e388d34fd3c0456122779e95f262c0d70198a168@matthias-k.org> | from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
try:
from .ipopt_wrapper import minimize_ipopt
except:
import logging
logging.error("Could not import ipopt wrapper. Maybe ipopt is not installed?")
| <commit_before>from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
<commit_msg>Make sure package works even if ipopt is not installed
Signed-off-by: Matthias Kümmerer <e388d34fd3c0456122779e95f262c0d70198a168@matthias-k.org><commit_after> | from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
try:
from .ipopt_wrapper import minimize_ipopt
except:
import logging
logging.error("Could not import ipopt wrapper. Maybe ipopt is not installed?")
| from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
Make sure package works even if ipopt is not installed
Signed-off-by: Matthias Kümmerer <e388d34fd3c0456122779e95f262c0d70198a168@matthias-k.org>from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
try:
from .ipopt_wrapper import minimize_ipopt
except:
import logging
logging.error("Could not import ipopt wrapper. Maybe ipopt is not installed?")
| <commit_before>from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
<commit_msg>Make sure package works even if ipopt is not installed
Signed-off-by: Matthias Kümmerer <e388d34fd3c0456122779e95f262c0d70198a168@matthias-k.org><commit_after>from __future__ import absolute_import
from .optimization import ParameterManager, minimize
from .jacobian import FunctionWithApproxJacobian, FunctionWithApproxJacobianCentral
try:
from .ipopt_wrapper import minimize_ipopt
except:
import logging
logging.error("Could not import ipopt wrapper. Maybe ipopt is not installed?")
|
e99855e31c30d0b554d24b14d98ae8b76e1fc0a0 | create_tables.py | create_tables.py | from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
| from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
| Fix close connection only when called as script. | Fix close connection only when called as script.
| Python | cc0-1.0 | sketchturnerr/WaifuSim-backend,sketchturnerr/WaifuSim-backend | from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
Fix close connection only when called as script. | from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
| <commit_before>from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
<commit_msg>Fix close connection only when called as script.<commit_after> | from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
| from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
Fix close connection only when called as script.from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
| <commit_before>from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
db.manual_close()
if __name__ == '__main__':
create_tables()
<commit_msg>Fix close connection only when called as script.<commit_after>from models.base_model import db
from models.user_model import UserModel
from models.waifu_model import WaifuModel
from models.waifu_message_model import WaifuMessageModel
def create_tables():
db.connect()
db.create_tables((
UserModel,
WaifuModel,
WaifuMessageModel,
), True)
if __name__ == '__main__':
create_tables()
db.manual_close()
|
95cb5fc25b3fb1470c4631b93fea11d6172240a4 | MeetingMinutes.py | MeetingMinutes.py | import sublime, sublime_plugin
import os
import re
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
| import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
self.save_pdf(html_file)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
def save_pdf(self, html_file):
pdf_file = self.change_extension(html_file, ".pdf")
call(["wkhtmltopdf",html_file,pdf_file])
| Add save as pdf feature. | Add save as pdf feature.
| Python | mit | Txarli/sublimetext-meeting-minutes,Txarli/sublimetext-meeting-minutes | import sublime, sublime_plugin
import os
import re
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
Add save as pdf feature. | import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
self.save_pdf(html_file)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
def save_pdf(self, html_file):
pdf_file = self.change_extension(html_file, ".pdf")
call(["wkhtmltopdf",html_file,pdf_file])
| <commit_before>import sublime, sublime_plugin
import os
import re
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
<commit_msg>Add save as pdf feature.<commit_after> | import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
self.save_pdf(html_file)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
def save_pdf(self, html_file):
pdf_file = self.change_extension(html_file, ".pdf")
call(["wkhtmltopdf",html_file,pdf_file])
| import sublime, sublime_plugin
import os
import re
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
Add save as pdf feature.import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
self.save_pdf(html_file)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
def save_pdf(self, html_file):
pdf_file = self.change_extension(html_file, ".pdf")
call(["wkhtmltopdf",html_file,pdf_file])
| <commit_before>import sublime, sublime_plugin
import os
import re
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
<commit_msg>Add save as pdf feature.<commit_after>import sublime, sublime_plugin
import os
import re
from subprocess import call
from .mistune import markdown
HTML_START = '<!DOCTYPE html><html><head><meta charset="utf-8"></head><body>'
HTML_END = '</body></html>'
class CreateMinuteCommand(sublime_plugin.TextCommand):
def run(self, edit):
region = sublime.Region(0, self.view.size())
md_source = self.view.substr(region)
md_source.encode(encoding='UTF-8',errors='strict')
html_source = HTML_START + markdown(md_source) + HTML_END
file_name = self.view.file_name()
html_file = self.change_extension(file_name, ".html")
with open(html_file, 'w+') as file_:
file_.write(html_source)
self.save_pdf(html_file)
print(file_name)
print(html_file)
def change_extension(self,file_name, new_ext):
f, ext = os.path.splitext(file_name)
f += new_ext
return f
def save_pdf(self, html_file):
pdf_file = self.change_extension(html_file, ".pdf")
call(["wkhtmltopdf",html_file,pdf_file])
|
085bc7787eac2d44fd4c19c8161709b20dc324be | finny/commands/generate_structure.py | finny/commands/generate_structure.py | import os
from finny.command import Command
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def run(self):
os.mkdir(self.path, 0755)
"""
You need to create:
.gitignore
requirements.txt
README.md
manage.py
{{ app_name }}:
initializers:
app.py
boot.py
runners:
default.py
development.py
test.py
production.py
monitors:
api.py
models.py
tests:
fixtures/
test_base.py
utils.py
"""
| import os
from finny.command import Command
BASE_FOLDER_TEMPLATES = [
".gitignore",
"requirements.txt",
"README.md",
"manage.py"
]
CONFIG_INITIALIZERS_TEMPLATES = [ "app.py" ]
CONFIG_RUNNERS_TEMPLATES = [ "default.py" ]
CONFIG_TEMPLATES = [
"boot.py",
"development.py.sample"
"test.py.sample",
"production.py.sample"
]
TEMPLATES_PATH = ""
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def _template(self, template_name, path):
pass
def _copy_template(self, source, src, dst):
pass
def run(self):
os.mkdir(self.path, 0755)
self._copy_templates(BASE_FOLDER_TEMPLATES, TEMPLATES_PATH, self.path)
self._copy_templates(CONFIG_INITIALIZERS_TEMPLATES,
TEMPLATES_PATH + "initializers",
"%s/%s/initializers" % (self.path, self.name) )
self._copy_templates(CONFIG_RUNNERS_TEMPLATES,
TEMPLATES_PATH + "runners",
"%s/%s/runners" % (self.path, self.name) )
self._copy_templates(CONFIG_TEMPLATES,
TEMPLATES_PATH + "config",
"%s/%s/" % (self.path, self.name) )
| Copy template structure for the create command | Copy template structure for the create command
| Python | mit | hurrycane/finny | import os
from finny.command import Command
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def run(self):
os.mkdir(self.path, 0755)
"""
You need to create:
.gitignore
requirements.txt
README.md
manage.py
{{ app_name }}:
initializers:
app.py
boot.py
runners:
default.py
development.py
test.py
production.py
monitors:
api.py
models.py
tests:
fixtures/
test_base.py
utils.py
"""
Copy template structure for the create command | import os
from finny.command import Command
BASE_FOLDER_TEMPLATES = [
".gitignore",
"requirements.txt",
"README.md",
"manage.py"
]
CONFIG_INITIALIZERS_TEMPLATES = [ "app.py" ]
CONFIG_RUNNERS_TEMPLATES = [ "default.py" ]
CONFIG_TEMPLATES = [
"boot.py",
"development.py.sample"
"test.py.sample",
"production.py.sample"
]
TEMPLATES_PATH = ""
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def _template(self, template_name, path):
pass
def _copy_template(self, source, src, dst):
pass
def run(self):
os.mkdir(self.path, 0755)
self._copy_templates(BASE_FOLDER_TEMPLATES, TEMPLATES_PATH, self.path)
self._copy_templates(CONFIG_INITIALIZERS_TEMPLATES,
TEMPLATES_PATH + "initializers",
"%s/%s/initializers" % (self.path, self.name) )
self._copy_templates(CONFIG_RUNNERS_TEMPLATES,
TEMPLATES_PATH + "runners",
"%s/%s/runners" % (self.path, self.name) )
self._copy_templates(CONFIG_TEMPLATES,
TEMPLATES_PATH + "config",
"%s/%s/" % (self.path, self.name) )
| <commit_before>import os
from finny.command import Command
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def run(self):
os.mkdir(self.path, 0755)
"""
You need to create:
.gitignore
requirements.txt
README.md
manage.py
{{ app_name }}:
initializers:
app.py
boot.py
runners:
default.py
development.py
test.py
production.py
monitors:
api.py
models.py
tests:
fixtures/
test_base.py
utils.py
"""
<commit_msg>Copy template structure for the create command<commit_after> | import os
from finny.command import Command
BASE_FOLDER_TEMPLATES = [
".gitignore",
"requirements.txt",
"README.md",
"manage.py"
]
CONFIG_INITIALIZERS_TEMPLATES = [ "app.py" ]
CONFIG_RUNNERS_TEMPLATES = [ "default.py" ]
CONFIG_TEMPLATES = [
"boot.py",
"development.py.sample"
"test.py.sample",
"production.py.sample"
]
TEMPLATES_PATH = ""
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def _template(self, template_name, path):
pass
def _copy_template(self, source, src, dst):
pass
def run(self):
os.mkdir(self.path, 0755)
self._copy_templates(BASE_FOLDER_TEMPLATES, TEMPLATES_PATH, self.path)
self._copy_templates(CONFIG_INITIALIZERS_TEMPLATES,
TEMPLATES_PATH + "initializers",
"%s/%s/initializers" % (self.path, self.name) )
self._copy_templates(CONFIG_RUNNERS_TEMPLATES,
TEMPLATES_PATH + "runners",
"%s/%s/runners" % (self.path, self.name) )
self._copy_templates(CONFIG_TEMPLATES,
TEMPLATES_PATH + "config",
"%s/%s/" % (self.path, self.name) )
| import os
from finny.command import Command
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def run(self):
os.mkdir(self.path, 0755)
"""
You need to create:
.gitignore
requirements.txt
README.md
manage.py
{{ app_name }}:
initializers:
app.py
boot.py
runners:
default.py
development.py
test.py
production.py
monitors:
api.py
models.py
tests:
fixtures/
test_base.py
utils.py
"""
Copy template structure for the create commandimport os
from finny.command import Command
BASE_FOLDER_TEMPLATES = [
".gitignore",
"requirements.txt",
"README.md",
"manage.py"
]
CONFIG_INITIALIZERS_TEMPLATES = [ "app.py" ]
CONFIG_RUNNERS_TEMPLATES = [ "default.py" ]
CONFIG_TEMPLATES = [
"boot.py",
"development.py.sample"
"test.py.sample",
"production.py.sample"
]
TEMPLATES_PATH = ""
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def _template(self, template_name, path):
pass
def _copy_template(self, source, src, dst):
pass
def run(self):
os.mkdir(self.path, 0755)
self._copy_templates(BASE_FOLDER_TEMPLATES, TEMPLATES_PATH, self.path)
self._copy_templates(CONFIG_INITIALIZERS_TEMPLATES,
TEMPLATES_PATH + "initializers",
"%s/%s/initializers" % (self.path, self.name) )
self._copy_templates(CONFIG_RUNNERS_TEMPLATES,
TEMPLATES_PATH + "runners",
"%s/%s/runners" % (self.path, self.name) )
self._copy_templates(CONFIG_TEMPLATES,
TEMPLATES_PATH + "config",
"%s/%s/" % (self.path, self.name) )
| <commit_before>import os
from finny.command import Command
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def run(self):
os.mkdir(self.path, 0755)
"""
You need to create:
.gitignore
requirements.txt
README.md
manage.py
{{ app_name }}:
initializers:
app.py
boot.py
runners:
default.py
development.py
test.py
production.py
monitors:
api.py
models.py
tests:
fixtures/
test_base.py
utils.py
"""
<commit_msg>Copy template structure for the create command<commit_after>import os
from finny.command import Command
BASE_FOLDER_TEMPLATES = [
".gitignore",
"requirements.txt",
"README.md",
"manage.py"
]
CONFIG_INITIALIZERS_TEMPLATES = [ "app.py" ]
CONFIG_RUNNERS_TEMPLATES = [ "default.py" ]
CONFIG_TEMPLATES = [
"boot.py",
"development.py.sample"
"test.py.sample",
"production.py.sample"
]
TEMPLATES_PATH = ""
class GenerateStructure(Command):
def __init__(self, name, path):
self.name = name
self.path = path
def _template(self, template_name, path):
pass
def _copy_template(self, source, src, dst):
pass
def run(self):
os.mkdir(self.path, 0755)
self._copy_templates(BASE_FOLDER_TEMPLATES, TEMPLATES_PATH, self.path)
self._copy_templates(CONFIG_INITIALIZERS_TEMPLATES,
TEMPLATES_PATH + "initializers",
"%s/%s/initializers" % (self.path, self.name) )
self._copy_templates(CONFIG_RUNNERS_TEMPLATES,
TEMPLATES_PATH + "runners",
"%s/%s/runners" % (self.path, self.name) )
self._copy_templates(CONFIG_TEMPLATES,
TEMPLATES_PATH + "config",
"%s/%s/" % (self.path, self.name) )
|
4dcca124835655ddbcf34b9d661b63f43eadf4a6 | cms/manage.py | cms/manage.py | #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| Fix string layout for readability | Fix string layout for readability
| Python | agpl-3.0 | jamesblunt/edx-platform,hkawasaki/kawasaki-aio8-0,sameetb-cuelogic/edx-platform-test,zofuthan/edx-platform,mtlchun/edx,zerobatu/edx-platform,carsongee/edx-platform,devs1991/test_edx_docmode,dkarakats/edx-platform,franosincic/edx-platform,Lektorium-LLC/edx-platform,dkarakats/edx-platform,procangroup/edx-platform,gsehub/edx-platform,lduarte1991/edx-platform,zadgroup/edx-platform,devs1991/test_edx_docmode,zofuthan/edx-platform,alexthered/kienhoc-platform,don-github/edx-platform,mushtaqak/edx-platform,PepperPD/edx-pepper-platform,etzhou/edx-platform,chauhanhardik/populo,jamiefolsom/edx-platform,chauhanhardik/populo,TeachAtTUM/edx-platform,edx/edx-platform,DefyVentures/edx-platform,sudheerchintala/LearnEraPlatForm,arbrandes/edx-platform,doganov/edx-platform,Edraak/edx-platform,edx-solutions/edx-platform,xingyepei/edx-platform,Semi-global/edx-platform,atsolakid/edx-platform,cognitiveclass/edx-platform,playm2mboy/edx-platform,B-MOOC/edx-platform,PepperPD/edx-pepper-platform,zadgroup/edx-platform,pelikanchik/edx-platform,hastexo/edx-platform,teltek/edx-platform,ubc/edx-platform,waheedahmed/edx-platform,bitifirefly/edx-platform,praveen-pal/edx-platform,don-github/edx-platform,UXE/local-edx,carsongee/edx-platform,vismartltd/edx-platform,pomegranited/edx-platform,deepsrijit1105/edx-platform,B-MOOC/edx-platform,raccoongang/edx-platform,bitifirefly/edx-platform,auferack08/edx-platform,doismellburning/edx-platform,hkawasaki/kawasaki-aio8-1,ESOedX/edx-platform,jazztpt/edx-platform,beni55/edx-platform,itsjeyd/edx-platform,UXE/local-edx,a-parhom/edx-platform,atsolakid/edx-platform,shashank971/edx-platform,marcore/edx-platform,LearnEra/LearnEraPlaftform,SravanthiSinha/edx-platform,ferabra/edx-platform,EduPepperPD/pepper2013,kursitet/edx-platform,nanolearningllc/edx-platform-cypress-2,leansoft/edx-platform,edx-solutions/edx-platform,RPI-OPENEDX/edx-platform,lduarte1991/edx-platform,iivic/BoiseStateX,simbs/edx-platform,CredoReference/edx-platform,bdero/edx-platfor
m,JCBarahona/edX,pabloborrego93/edx-platform,peterm-itr/edx-platform,EduPepperPDTesting/pepper2013-testing,mcgachey/edx-platform,solashirai/edx-platform,ovnicraft/edx-platform,nagyistoce/edx-platform,edx-solutions/edx-platform,alu042/edx-platform,dsajkl/reqiop,kxliugang/edx-platform,zubair-arbi/edx-platform,hkawasaki/kawasaki-aio8-0,doganov/edx-platform,ZLLab-Mooc/edx-platform,DNFcode/edx-platform,eduNEXT/edx-platform,xuxiao19910803/edx-platform,tiagochiavericosta/edx-platform,RPI-OPENEDX/edx-platform,y12uc231/edx-platform,4eek/edx-platform,UOMx/edx-platform,TeachAtTUM/edx-platform,benpatterson/edx-platform,BehavioralInsightsTeam/edx-platform,WatanabeYasumasa/edx-platform,ampax/edx-platform-backup,mjg2203/edx-platform-seas,ampax/edx-platform-backup,nagyistoce/edx-platform,tanmaykm/edx-platform,waheedahmed/edx-platform,defance/edx-platform,Ayub-Khan/edx-platform,RPI-OPENEDX/edx-platform,alexthered/kienhoc-platform,chrisndodge/edx-platform,Shrhawk/edx-platform,UXE/local-edx,shurihell/testasia,zofuthan/edx-platform,knehez/edx-platform,kursitet/edx-platform,vasyarv/edx-platform,EduPepperPD/pepper2013,doganov/edx-platform,nagyistoce/edx-platform,hamzehd/edx-platform,zerobatu/edx-platform,mtlchun/edx,alexthered/kienhoc-platform,playm2mboy/edx-platform,jamiefolsom/edx-platform,cselis86/edx-platform,sameetb-cuelogic/edx-platform-test,MSOpenTech/edx-platform,jbassen/edx-platform,beni55/edx-platform,analyseuc3m/ANALYSE-v1,peterm-itr/edx-platform,caesar2164/edx-platform,chudaol/edx-platform,valtech-mooc/edx-platform,vismartltd/edx-platform,J861449197/edx-platform,franosincic/edx-platform,jazkarta/edx-platform-for-isc,cognitiveclass/edx-platform,philanthropy-u/edx-platform,olexiim/edx-platform,kxliugang/edx-platform,bigdatauniversity/edx-platform,romain-li/edx-platform,arbrandes/edx-platform,CourseTalk/edx-platform,knehez/edx-platform,morenopc/edx-platform,morpheby/levelup-by,antonve/s4-project-mooc,vikas1885/test1,cselis86/edx-platform,kalebhartje/schoolboost,motion2015/a3,kur
sitet/edx-platform,nanolearningllc/edx-platform-cypress,EduPepperPD/pepper2013,prarthitm/edxplatform,appliedx/edx-platform,wwj718/ANALYSE,doismellburning/edx-platform,MakeHer/edx-platform,pdehaye/theming-edx-platform,cognitiveclass/edx-platform,jamiefolsom/edx-platform,IONISx/edx-platform,nikolas/edx-platform,leansoft/edx-platform,eemirtekin/edx-platform,edry/edx-platform,nttks/jenkins-test,dsajkl/123,OmarIthawi/edx-platform,synergeticsedx/deployment-wipro,fly19890211/edx-platform,beacloudgenius/edx-platform,ESOedX/edx-platform,Edraak/edx-platform,utecuy/edx-platform,fintech-circle/edx-platform,rationalAgent/edx-platform-custom,kamalx/edx-platform,jjmiranda/edx-platform,motion2015/edx-platform,vasyarv/edx-platform,jswope00/GAI,Edraak/circleci-edx-platform,shurihell/testasia,shubhdev/edxOnBaadal,hmcmooc/muddx-platform,msegado/edx-platform,Endika/edx-platform,halvertoluke/edx-platform,caesar2164/edx-platform,andyzsf/edx,JioEducation/edx-platform,shabab12/edx-platform,nttks/edx-platform,pku9104038/edx-platform,hamzehd/edx-platform,mcgachey/edx-platform,syjeon/new_edx,edx-solutions/edx-platform,martynovp/edx-platform,mitocw/edx-platform,proversity-org/edx-platform,TeachAtTUM/edx-platform,EduPepperPD/pepper2013,Ayub-Khan/edx-platform,mahendra-r/edx-platform,jamiefolsom/edx-platform,4eek/edx-platform,EduPepperPDTesting/pepper2013-testing,pomegranited/edx-platform,MSOpenTech/edx-platform,chauhanhardik/populo_2,wwj718/edx-platform,JioEducation/edx-platform,OmarIthawi/edx-platform,xingyepei/edx-platform,stvstnfrd/edx-platform,olexiim/edx-platform,kxliugang/edx-platform,xuxiao19910803/edx,nanolearningllc/edx-platform-cypress,pabloborrego93/edx-platform,motion2015/a3,edry/edx-platform,xinjiguaike/edx-platform,knehez/edx-platform,vikas1885/test1,eduNEXT/edunext-platform,a-parhom/edx-platform,LICEF/edx-platform,nanolearningllc/edx-platform-cypress,chand3040/cloud_that,xuxiao19910803/edx,mbareta/edx-platform-ft,playm2mboy/edx-platform,doismellburning/edx-platform,rhndg/openedx,jb
assen/edx-platform,prarthitm/edxplatform,yokose-ks/edx-platform,cyanna/edx-platform,zhenzhai/edx-platform,arifsetiawan/edx-platform,xinjiguaike/edx-platform,jbzdak/edx-platform,zubair-arbi/edx-platform,cpennington/edx-platform,hastexo/edx-platform,morenopc/edx-platform,longmen21/edx-platform,bigdatauniversity/edx-platform,Endika/edx-platform,BehavioralInsightsTeam/edx-platform,MakeHer/edx-platform,playm2mboy/edx-platform,shubhdev/openedx,Shrhawk/edx-platform,DefyVentures/edx-platform,bdero/edx-platform,cyanna/edx-platform,angelapper/edx-platform,nikolas/edx-platform,etzhou/edx-platform,jelugbo/tundex,eduNEXT/edunext-platform,jswope00/griffinx,JCBarahona/edX,Unow/edx-platform,OmarIthawi/edx-platform,amir-qayyum-khan/edx-platform,ampax/edx-platform-backup,morpheby/levelup-by,dsajkl/reqiop,abdoosh00/edraak,SivilTaram/edx-platform,tiagochiavericosta/edx-platform,olexiim/edx-platform,unicri/edx-platform,DefyVentures/edx-platform,J861449197/edx-platform,antoviaque/edx-platform,pomegranited/edx-platform,nanolearningllc/edx-platform-cypress-2,EDUlib/edx-platform,nttks/edx-platform,chauhanhardik/populo_2,B-MOOC/edx-platform,zofuthan/edx-platform,torchingloom/edx-platform,Livit/Livit.Learn.EdX,gymnasium/edx-platform,benpatterson/edx-platform,auferack08/edx-platform,rue89-tech/edx-platform,WatanabeYasumasa/edx-platform,IndonesiaX/edx-platform,philanthropy-u/edx-platform,Edraak/edraak-platform,ubc/edx-platform,nikolas/edx-platform,apigee/edx-platform,unicri/edx-platform,IITBinterns13/edx-platform-dev,chrisndodge/edx-platform,fly19890211/edx-platform,alexthered/kienhoc-platform,devs1991/test_edx_docmode,openfun/edx-platform,arifsetiawan/edx-platform,teltek/edx-platform,mjirayu/sit_academy,ferabra/edx-platform,cecep-edu/edx-platform,LearnEra/LearnEraPlaftform,EduPepperPDTesting/pepper2013-testing,angelapper/edx-platform,ahmadiga/min_edx,Softmotions/edx-platform,abdoosh00/edraak,ahmadiga/min_edx,shubhdev/openedx,motion2015/a3,10clouds/edx-platform,alu042/edx-platform,tanmaykm/edx-
platform,mjirayu/sit_academy,Ayub-Khan/edx-platform,jelugbo/tundex,appsembler/edx-platform,morpheby/levelup-by,martynovp/edx-platform,SravanthiSinha/edx-platform,cyanna/edx-platform,chudaol/edx-platform,jswope00/griffinx,Semi-global/edx-platform,Edraak/edraak-platform,prarthitm/edxplatform,proversity-org/edx-platform,prarthitm/edxplatform,eemirtekin/edx-platform,dcosentino/edx-platform,rhndg/openedx,JioEducation/edx-platform,abdoosh00/edx-rtl-final,eduNEXT/edx-platform,halvertoluke/edx-platform,eestay/edx-platform,antonve/s4-project-mooc,appsembler/edx-platform,unicri/edx-platform,xinjiguaike/edx-platform,Kalyzee/edx-platform,edx/edx-platform,synergeticsedx/deployment-wipro,shabab12/edx-platform,jbzdak/edx-platform,fly19890211/edx-platform,Stanford-Online/edx-platform,motion2015/edx-platform,JioEducation/edx-platform,iivic/BoiseStateX,valtech-mooc/edx-platform,dsajkl/123,jswope00/griffinx,4eek/edx-platform,EduPepperPDTesting/pepper2013-testing,Unow/edx-platform,jjmiranda/edx-platform,RPI-OPENEDX/edx-platform,nikolas/edx-platform,shashank971/edx-platform,mtlchun/edx,pdehaye/theming-edx-platform,nttks/edx-platform,kmoocdev/edx-platform,kxliugang/edx-platform,jamesblunt/edx-platform,appsembler/edx-platform,don-github/edx-platform,TsinghuaX/edx-platform,beni55/edx-platform,shabab12/edx-platform,vikas1885/test1,IndonesiaX/edx-platform,dcosentino/edx-platform,stvstnfrd/edx-platform,beacloudgenius/edx-platform,dsajkl/reqiop,UXE/local-edx,MakeHer/edx-platform,polimediaupv/edx-platform,ahmadio/edx-platform,jruiperezv/ANALYSE,rhndg/openedx,msegado/edx-platform,mjg2203/edx-platform-seas,ovnicraft/edx-platform,Livit/Livit.Learn.EdX,shubhdev/edxOnBaadal,vismartltd/edx-platform,stvstnfrd/edx-platform,polimediaupv/edx-platform,nagyistoce/edx-platform,syjeon/new_edx,syjeon/new_edx,yokose-ks/edx-platform,devs1991/test_edx_docmode,shashank971/edx-platform,hkawasaki/kawasaki-aio8-1,naresh21/synergetics-edx-platform,kursitet/edx-platform,edry/edx-platform,rue89-tech/edx-platform,nanole
arningllc/edx-platform-cypress,ahmadiga/min_edx,Edraak/edx-platform,UOMx/edx-platform,franosincic/edx-platform,fintech-circle/edx-platform,rhndg/openedx,eestay/edx-platform,nanolearningllc/edx-platform-cypress-2,inares/edx-platform,Kalyzee/edx-platform,dkarakats/edx-platform,bdero/edx-platform,knehez/edx-platform,SravanthiSinha/edx-platform,appliedx/edx-platform,utecuy/edx-platform,pelikanchik/edx-platform,LICEF/edx-platform,caesar2164/edx-platform,praveen-pal/edx-platform,kmoocdev2/edx-platform,dkarakats/edx-platform,cognitiveclass/edx-platform,zubair-arbi/edx-platform,edry/edx-platform,yokose-ks/edx-platform,alexthered/kienhoc-platform,cecep-edu/edx-platform,beacloudgenius/edx-platform,analyseuc3m/ANALYSE-v1,zhenzhai/edx-platform,carsongee/edx-platform,kmoocdev2/edx-platform,vismartltd/edx-platform,dcosentino/edx-platform,TeachAtTUM/edx-platform,nanolearning/edx-platform,jelugbo/tundex,UOMx/edx-platform,JCBarahona/edX,shubhdev/edx-platform,jazkarta/edx-platform,LICEF/edx-platform,rismalrv/edx-platform,adoosii/edx-platform,cpennington/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,B-MOOC/edx-platform,AkA84/edx-platform,teltek/edx-platform,LearnEra/LearnEraPlaftform,WatanabeYasumasa/edx-platform,ubc/edx-platform,procangroup/edx-platform,abdoosh00/edraak,mbareta/edx-platform-ft,CourseTalk/edx-platform,ahmadio/edx-platform,Unow/edx-platform,longmen21/edx-platform,jamesblunt/edx-platform,utecuy/edx-platform,ampax/edx-platform,jzoldak/edx-platform,rue89-tech/edx-platform,ZLLab-Mooc/edx-platform,DNFcode/edx-platform,mahendra-r/edx-platform,msegado/edx-platform,Endika/edx-platform,10clouds/edx-platform,shubhdev/edxOnBaadal,IndonesiaX/edx-platform,raccoongang/edx-platform,abdoosh00/edx-rtl-final,playm2mboy/edx-platform,angelapper/edx-platform,motion2015/edx-platform,Lektorium-LLC/edx-platform,shubhdev/edxOnBaadal,louyihua/edx-platform,y12uc231/edx-platform,hkawasaki/kawasaki-aio8-0,vasyarv/edx-platform,jswope00/griffinx,Softmotions/edx-platform,etzhou/edx-platf
orm,jswope00/GAI,tiagochiavericosta/edx-platform,itsjeyd/edx-platform,jelugbo/tundex,nanolearning/edx-platform,nanolearning/edx-platform,kmoocdev2/edx-platform,praveen-pal/edx-platform,openfun/edx-platform,lduarte1991/edx-platform,jazkarta/edx-platform,franosincic/edx-platform,jswope00/GAI,beni55/edx-platform,adoosii/edx-platform,bitifirefly/edx-platform,jazkarta/edx-platform,zerobatu/edx-platform,marcore/edx-platform,EduPepperPDTesting/pepper2013-testing,inares/edx-platform,motion2015/edx-platform,ovnicraft/edx-platform,EDUlib/edx-platform,TsinghuaX/edx-platform,alu042/edx-platform,xuxiao19910803/edx-platform,pdehaye/theming-edx-platform,LearnEra/LearnEraPlaftform,chand3040/cloud_that,dsajkl/123,DNFcode/edx-platform,solashirai/edx-platform,mahendra-r/edx-platform,zhenzhai/edx-platform,BehavioralInsightsTeam/edx-platform,kmoocdev/edx-platform,deepsrijit1105/edx-platform,pku9104038/edx-platform,shabab12/edx-platform,Livit/Livit.Learn.EdX,atsolakid/edx-platform,doganov/edx-platform,vasyarv/edx-platform,morenopc/edx-platform,Softmotions/edx-platform,nttks/jenkins-test,ESOedX/edx-platform,iivic/BoiseStateX,kalebhartje/schoolboost,EDUlib/edx-platform,proversity-org/edx-platform,openfun/edx-platform,jonathan-beard/edx-platform,hmcmooc/muddx-platform,shashank971/edx-platform,ZLLab-Mooc/edx-platform,arifsetiawan/edx-platform,chudaol/edx-platform,gsehub/edx-platform,Ayub-Khan/edx-platform,hkawasaki/kawasaki-aio8-2,rismalrv/edx-platform,vasyarv/edx-platform,eduNEXT/edunext-platform,amir-qayyum-khan/edx-platform,CredoReference/edx-platform,ak2703/edx-platform,itsjeyd/edx-platform,mjg2203/edx-platform-seas,cselis86/edx-platform,stvstnfrd/edx-platform,cecep-edu/edx-platform,wwj718/edx-platform,xuxiao19910803/edx-platform,syjeon/new_edx,don-github/edx-platform,ahmadio/edx-platform,JCBarahona/edX,chauhanhardik/populo,Stanford-Online/edx-platform,shubhdev/openedx,hkawasaki/kawasaki-aio8-1,miptliot/edx-platform,unicri/edx-platform,rismalrv/edx-platform,inares/edx-platform,eemirtekin
/edx-platform,J861449197/edx-platform,zhenzhai/edx-platform,shurihell/testasia,bdero/edx-platform,pabloborrego93/edx-platform,jbzdak/edx-platform,appliedx/edx-platform,cecep-edu/edx-platform,CourseTalk/edx-platform,rationalAgent/edx-platform-custom,zhenzhai/edx-platform,bigdatauniversity/edx-platform,halvertoluke/edx-platform,eestay/edx-platform,jbzdak/edx-platform,LICEF/edx-platform,DefyVentures/edx-platform,ak2703/edx-platform,MSOpenTech/edx-platform,ahmedaljazzar/edx-platform,wwj718/edx-platform,kmoocdev2/edx-platform,ahmadiga/min_edx,chauhanhardik/populo_2,jzoldak/edx-platform,nttks/jenkins-test,antoviaque/edx-platform,mahendra-r/edx-platform,AkA84/edx-platform,Semi-global/edx-platform,MakeHer/edx-platform,abdoosh00/edx-rtl-final,devs1991/test_edx_docmode,kalebhartje/schoolboost,appliedx/edx-platform,ovnicraft/edx-platform,dsajkl/123,apigee/edx-platform,leansoft/edx-platform,ampax/edx-platform,cyanna/edx-platform,xinjiguaike/edx-platform,IONISx/edx-platform,simbs/edx-platform,apigee/edx-platform,jbassen/edx-platform,zerobatu/edx-platform,mahendra-r/edx-platform,10clouds/edx-platform,mjirayu/sit_academy,beacloudgenius/edx-platform,hastexo/edx-platform,jolyonb/edx-platform,mbareta/edx-platform-ft,romain-li/edx-platform,wwj718/ANALYSE,pelikanchik/edx-platform,mbareta/edx-platform-ft,cyanna/edx-platform,jjmiranda/edx-platform,mjg2203/edx-platform-seas,jazztpt/edx-platform,solashirai/edx-platform,chand3040/cloud_that,benpatterson/edx-platform,yokose-ks/edx-platform,jazztpt/edx-platform,y12uc231/edx-platform,don-github/edx-platform,valtech-mooc/edx-platform,edx/edx-platform,naresh21/synergetics-edx-platform,chand3040/cloud_that,Semi-global/edx-platform,louyihua/edx-platform,pepeportela/edx-platform,Livit/Livit.Learn.EdX,wwj718/ANALYSE,IONISx/edx-platform,chauhanhardik/populo_2,cselis86/edx-platform,AkA84/edx-platform,cecep-edu/edx-platform,martynovp/edx-platform,caesar2164/edx-platform,hmcmooc/muddx-platform,Lektorium-LLC/edx-platform,ahmedaljazzar/edx-platform,DNFcod
e/edx-platform,andyzsf/edx,kmoocdev2/edx-platform,devs1991/test_edx_docmode,chrisndodge/edx-platform,shashank971/edx-platform,zadgroup/edx-platform,kalebhartje/schoolboost,miptliot/edx-platform,Edraak/circleci-edx-platform,jazkarta/edx-platform-for-isc,cognitiveclass/edx-platform,simbs/edx-platform,jbassen/edx-platform,mcgachey/edx-platform,vikas1885/test1,IndonesiaX/edx-platform,bigdatauniversity/edx-platform,olexiim/edx-platform,appsembler/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,etzhou/edx-platform,pepeportela/edx-platform,Ayub-Khan/edx-platform,sudheerchintala/LearnEraPlatForm,dkarakats/edx-platform,kmoocdev/edx-platform,sameetb-cuelogic/edx-platform-test,romain-li/edx-platform,openfun/edx-platform,Endika/edx-platform,shubhdev/openedx,wwj718/ANALYSE,polimediaupv/edx-platform,solashirai/edx-platform,jonathan-beard/edx-platform,zadgroup/edx-platform,ZLLab-Mooc/edx-platform,tiagochiavericosta/edx-platform,jazztpt/edx-platform,chauhanhardik/populo,ampax/edx-platform-backup,leansoft/edx-platform,adoosii/edx-platform,sameetb-cuelogic/edx-platform-test,hastexo/edx-platform,nttks/edx-platform,openfun/edx-platform,antonve/s4-project-mooc,gymnasium/edx-platform,hkawasaki/kawasaki-aio8-2,ampax/edx-platform-backup,OmarIthawi/edx-platform,deepsrijit1105/edx-platform,tanmaykm/edx-platform,zerobatu/edx-platform,benpatterson/edx-platform,rhndg/openedx,torchingloom/edx-platform,EduPepperPD/pepper2013,ak2703/edx-platform,mitocw/edx-platform,defance/edx-platform,inares/edx-platform,jazkarta/edx-platform-for-isc,AkA84/edx-platform,halvertoluke/edx-platform,shubhdev/edxOnBaadal,nagyistoce/edx-platform,SivilTaram/edx-platform,CourseTalk/edx-platform,kxliugang/edx-platform,IITBinterns13/edx-platform-dev,pelikanchik/edx-platform,xingyepei/edx-platform,ak2703/edx-platform,rationalAgent/edx-platform-custom,edry/edx-platform,jruiperezv/ANALYSE,ferabra/edx-platform,valtech-mooc/edx-platform,ahmedaljazzar/edx-platform,arbrandes/edx-platform,kmoocdev/edx-platform,jjmiranda/e
dx-platform,hkawasaki/kawasaki-aio8-2,alu042/edx-platform,lduarte1991/edx-platform,cpennington/edx-platform,mtlchun/edx,antoviaque/edx-platform,zofuthan/edx-platform,jolyonb/edx-platform,Kalyzee/edx-platform,kalebhartje/schoolboost,jazkarta/edx-platform,cselis86/edx-platform,mcgachey/edx-platform,etzhou/edx-platform,Stanford-Online/edx-platform,raccoongang/edx-platform,angelapper/edx-platform,iivic/BoiseStateX,zadgroup/edx-platform,andyzsf/edx,analyseuc3m/ANALYSE-v1,hmcmooc/muddx-platform,mjirayu/sit_academy,procangroup/edx-platform,abdoosh00/edraak,rismalrv/edx-platform,xuxiao19910803/edx,LICEF/edx-platform,IONISx/edx-platform,itsjeyd/edx-platform,chand3040/cloud_that,marcore/edx-platform,SivilTaram/edx-platform,nanolearning/edx-platform,morenopc/edx-platform,antonve/s4-project-mooc,xingyepei/edx-platform,xuxiao19910803/edx-platform,Edraak/circleci-edx-platform,vikas1885/test1,jonathan-beard/edx-platform,naresh21/synergetics-edx-platform,marcore/edx-platform,tanmaykm/edx-platform,waheedahmed/edx-platform,jzoldak/edx-platform,rue89-tech/edx-platform,arifsetiawan/edx-platform,jzoldak/edx-platform,rationalAgent/edx-platform-custom,philanthropy-u/edx-platform,Softmotions/edx-platform,atsolakid/edx-platform,appliedx/edx-platform,nanolearningllc/edx-platform-cypress-2,auferack08/edx-platform,ampax/edx-platform,antonve/s4-project-mooc,mjirayu/sit_academy,amir-qayyum-khan/edx-platform,vismartltd/edx-platform,eestay/edx-platform,nttks/jenkins-test,knehez/edx-platform,polimediaupv/edx-platform,y12uc231/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,Unow/edx-platform,kamalx/edx-platform,nttks/edx-platform,bigdatauniversity/edx-platform,xuxiao19910803/edx,kamalx/edx-platform,simbs/edx-platform,halvertoluke/edx-platform,TsinghuaX/edx-platform,edx/edx-platform,AkA84/edx-platform,PepperPD/edx-pepper-platform,chudaol/edx-platform,eemirtekin/edx-platform,chudaol/edx-platform,eduNEXT/edunext-platform,mtlchun/edx,shubhdev/edx-platform,SravanthiSinha/edx-platform,MakeHer
/edx-platform,naresh21/synergetics-edx-platform,Edraak/circleci-edx-platform,ovnicraft/edx-platform,rue89-tech/edx-platform,ZLLab-Mooc/edx-platform,shubhdev/openedx,J861449197/edx-platform,eestay/edx-platform,IONISx/edx-platform,B-MOOC/edx-platform,longmen21/edx-platform,IITBinterns13/edx-platform-dev,jazkarta/edx-platform-for-isc,Semi-global/edx-platform,doismellburning/edx-platform,chauhanhardik/populo_2,EduPepperPDTesting/pepper2013-testing,ahmadio/edx-platform,franosincic/edx-platform,jazztpt/edx-platform,jswope00/GAI,miptliot/edx-platform,pdehaye/theming-edx-platform,motion2015/a3,procangroup/edx-platform,louyihua/edx-platform,andyzsf/edx,4eek/edx-platform,adoosii/edx-platform,jamesblunt/edx-platform,kamalx/edx-platform,10clouds/edx-platform,kamalx/edx-platform,synergeticsedx/deployment-wipro,fintech-circle/edx-platform,hamzehd/edx-platform,Shrhawk/edx-platform,DefyVentures/edx-platform,dsajkl/reqiop,jbzdak/edx-platform,kmoocdev/edx-platform,tiagochiavericosta/edx-platform,morenopc/edx-platform,Edraak/circleci-edx-platform,Edraak/edraak-platform,waheedahmed/edx-platform,eemirtekin/edx-platform,J861449197/edx-platform,praveen-pal/edx-platform,jolyonb/edx-platform,carsongee/edx-platform,IITBinterns13/edx-platform-dev,simbs/edx-platform,CredoReference/edx-platform,rismalrv/edx-platform,gymnasium/edx-platform,hkawasaki/kawasaki-aio8-0,Edraak/edraak-platform,bitifirefly/edx-platform,mitocw/edx-platform,mushtaqak/edx-platform,jazkarta/edx-platform,y12uc231/edx-platform,jamesblunt/edx-platform,nikolas/edx-platform,gsehub/edx-platform,IndonesiaX/edx-platform,mushtaqak/edx-platform,ampax/edx-platform,waheedahmed/edx-platform,mcgachey/edx-platform,wwj718/edx-platform,SravanthiSinha/edx-platform,jruiperezv/ANALYSE,nanolearning/edx-platform,zubair-arbi/edx-platform,sudheerchintala/LearnEraPlatForm,valtech-mooc/edx-platform,msegado/edx-platform,Stanford-Online/edx-platform,hamzehd/edx-platform,motion2015/edx-platform,TsinghuaX/edx-platform,zubair-arbi/edx-platform,olexiim/e
dx-platform,peterm-itr/edx-platform,ak2703/edx-platform,pku9104038/edx-platform,inares/edx-platform,kursitet/edx-platform,pomegranited/edx-platform,yokose-ks/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-2,teltek/edx-platform,leansoft/edx-platform,utecuy/edx-platform,BehavioralInsightsTeam/edx-platform,ahmadio/edx-platform,louyihua/edx-platform,deepsrijit1105/edx-platform,auferack08/edx-platform,jruiperezv/ANALYSE,hkawasaki/kawasaki-aio8-1,raccoongang/edx-platform,arbrandes/edx-platform,EDUlib/edx-platform,romain-li/edx-platform,romain-li/edx-platform,abdoosh00/edx-rtl-final,atsolakid/edx-platform,ferabra/edx-platform,sameetb-cuelogic/edx-platform-test,arifsetiawan/edx-platform,4eek/edx-platform,fly19890211/edx-platform,RPI-OPENEDX/edx-platform,peterm-itr/edx-platform,antoviaque/edx-platform,martynovp/edx-platform,proversity-org/edx-platform,Shrhawk/edx-platform,jolyonb/edx-platform,gsehub/edx-platform,longmen21/edx-platform,chrisndodge/edx-platform,jonathan-beard/edx-platform,CredoReference/edx-platform,eduNEXT/edx-platform,chauhanhardik/populo,jamiefolsom/edx-platform,polimediaupv/edx-platform,Edraak/edx-platform,Kalyzee/edx-platform,PepperPD/edx-pepper-platform,bitifirefly/edx-platform,gymnasium/edx-platform,PepperPD/edx-pepper-platform,Lektorium-LLC/edx-platform,shubhdev/edx-platform,shubhdev/edx-platform,torchingloom/edx-platform,torchingloom/edx-platform,WatanabeYasumasa/edx-platform,longmen21/edx-platform,solashirai/edx-platform,morpheby/levelup-by,dcosentino/edx-platform,Shrhawk/edx-platform,xingyepei/edx-platform,msegado/edx-platform,xuxiao19910803/edx,utecuy/edx-platform,doganov/edx-platform,xuxiao19910803/edx-platform,miptliot/edx-platform,apigee/edx-platform,jbassen/edx-platform,beni55/edx-platform,benpatterson/edx-platform,motion2015/a3,rationalAgent/edx-platform-custom,martynovp/edx-platform,cpennington/edx-platform,wwj718/ANALYSE,mitocw/edx-platform,jelugbo/tundex,ferabra/edx-platform,a-parhom/edx-platform,defance/edx-platform,shubhdev/edx
-platform,shurihell/testasia,devs1991/test_edx_docmode,SivilTaram/edx-platform,DNFcode/edx-platform,unicri/edx-platform,pepeportela/edx-platform,pomegranited/edx-platform,iivic/BoiseStateX,SivilTaram/edx-platform,eduNEXT/edx-platform,MSOpenTech/edx-platform,jruiperezv/ANALYSE,fly19890211/edx-platform,nanolearningllc/edx-platform-cypress-2,UOMx/edx-platform,Edraak/edx-platform,nanolearningllc/edx-platform-cypress,ahmadiga/min_edx,pepeportela/edx-platform,dsajkl/123,Kalyzee/edx-platform,mushtaqak/edx-platform,synergeticsedx/deployment-wipro,dcosentino/edx-platform,Softmotions/edx-platform,fintech-circle/edx-platform,sudheerchintala/LearnEraPlatForm,ahmedaljazzar/edx-platform,philanthropy-u/edx-platform,a-parhom/edx-platform,doismellburning/edx-platform,MSOpenTech/edx-platform,JCBarahona/edX,ubc/edx-platform,nttks/jenkins-test,amir-qayyum-khan/edx-platform,mushtaqak/edx-platform,analyseuc3m/ANALYSE-v1,adoosii/edx-platform,torchingloom/edx-platform,jonathan-beard/edx-platform,pku9104038/edx-platform,defance/edx-platform,shurihell/testasia,jazkarta/edx-platform-for-isc,jswope00/griffinx,xinjiguaike/edx-platform | #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
Fix string layout for readability | #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| <commit_before>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
<commit_msg>Fix string layout for readability<commit_after> | #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| #!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
Fix string layout for readability#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
| <commit_before>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
<commit_msg>Fix string layout for readability<commit_after>#!/usr/bin/env python
from django.core.management import execute_manager
import imp
try:
imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. "
"It appears you've customized things.\nYou'll have to run django-admin.py, "
"passing it your settings module.\n" % __file__)
sys.exit(1)
import settings
if __name__ == "__main__":
execute_manager(settings)
|
38ac22c8380e91777c22f7dcb9a5297e9737d522 | pymatgen/io/cp2k/tests/test_outputs.py | pymatgen/io/cp2k/tests/test_outputs.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
| # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
| Switch output to to use TEST_FILES path | Switch output to to use TEST_FILES path | Python | mit | davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,gmatteo/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,gmatteo/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
Switch output to to use TEST_FILES path | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
| <commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
<commit_msg>Switch output to to use TEST_FILES path<commit_after> | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
| # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
Switch output to to use TEST_FILES path# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
| <commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
MODULE_DIR = Path(__file__).resolve().parent
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename='../test_files/cp2k.out', auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
<commit_msg>Switch output to to use TEST_FILES path<commit_after># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
from pathlib import Path
from pymatgen.util.testing import PymatgenTest
from pymatgen.io.cp2k.outputs import Cp2kOutput
TEST_FILES = Path(__file__).parent.parent.joinpath("test_files").resolve()
class SetTest(PymatgenTest):
def setUp(self):
self.out = Cp2kOutput(filename=TEST_FILES / "cp2k.out", auto_load=True)
def test_files(self):
self.out.parse_files()
self.assertEqual(len(self.out.filenames['PDOS']), 2)
def test(self):
self.assertEqual(self.out.spin_polarized, False)
self.assertEqual(self.out.completed, True)
self.assertEqual(self.out.num_warnings, [[1]])
self.assertEqual(self.out.run_type.upper(), 'GEO_OPT')
if __name__ == "__main__":
unittest.main()
|
33e610576462d017bf25a65f1e879e6340f2ca06 | python2/raygun4py/middleware/django.py | python2/raygun4py/middleware/django.py | from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
| from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
| Fix RawPostDataException on request.body access. | Fix RawPostDataException on request.body access.
Django's HttpRequest class doesn't like the `request.body` to be accessed more than one time. Upon the second attempt to read from `request.body`, Django throws a `RawPostDataException`. Since the previous code in this spot was conditional upon a `hasattr(request, 'body')` check, and because hasattr returns False when the attribute access raises an exception, raygun was falling back to looking up the rawData field from `request.raw_post_data`. But that attribute doesn't exist in newer versions of Django, so we were getting exceptions in the raygun4py's Django process_exception middleware, which is a royal pain in the tush.
So, this change does the hasattr check on both attributes, and only includes rawData if one of the `request.body` or `request.raw_post_data` attributes actually exists and is readable. | Python | mit | MindscapeHQ/raygun4py | from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
Fix RawPostDataException on request.body access.
Django's HttpRequest class doesn't like the `request.body` to be accessed more than one time. Upon the second attempt to read from `request.body`, Django throws a `RawPostDataException`. Since the previous code in this spot was conditional upon a `hasattr(request, 'body')` check, and because hasattr returns False when the attribute access raises an exception, raygun was falling back to looking up the rawData field from `request.raw_post_data`. But that attribute doesn't exist in newer versions of Django, so we were getting exceptions in the raygun4py's Django process_exception middleware, which is a royal pain in the tush.
So, this change does the hasattr check on both attributes, and only includes rawData if one of the `request.body` or `request.raw_post_data` attributes actually exists and is readable. | from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
| <commit_before>from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
<commit_msg>Fix RawPostDataException on request.body access.
Django's HttpRequest class doesn't like the `request.body` to be accessed more than one time. Upon the second attempt to read from `request.body`, Django throws a `RawPostDataException`. Since the previous code in this spot was conditional upon a `hasattr(request, 'body')` check, and because hasattr returns False when the attribute access raises an exception, raygun was falling back to looking up the rawData field from `request.raw_post_data`. But that attribute doesn't exist in newer versions of Django, so we were getting exceptions in the raygun4py's Django process_exception middleware, which is a royal pain in the tush.
So, this change does the hasattr check on both attributes, and only includes rawData if one of the `request.body` or `request.raw_post_data` attributes actually exists and is readable.<commit_after> | from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
| from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
Fix RawPostDataException on request.body access.
Django's HttpRequest class doesn't like the `request.body` to be accessed more than one time. Upon the second attempt to read from `request.body`, Django throws a `RawPostDataException`. Since the previous code in this spot was conditional upon a `hasattr(request, 'body')` check, and because hasattr returns False when the attribute access raises an exception, raygun was falling back to looking up the rawData field from `request.raw_post_data`. But that attribute doesn't exist in newer versions of Django, so we were getting exceptions in the raygun4py's Django process_exception middleware, which is a royal pain in the tush.
So, this change does the hasattr check on both attributes, and only includes rawData if one of the `request.body` or `request.raw_post_data` attributes actually exists and is readable.from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
| <commit_before>from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
<commit_msg>Fix RawPostDataException on request.body access.
Django's HttpRequest class doesn't like the `request.body` to be accessed more than one time. Upon the second attempt to read from `request.body`, Django throws a `RawPostDataException`. Since the previous code in this spot was conditional upon a `hasattr(request, 'body')` check, and because hasattr returns False when the attribute access raises an exception, raygun was falling back to looking up the rawData field from `request.raw_post_data`. But that attribute doesn't exist in newer versions of Django, so we were getting exceptions in the raygun4py's Django process_exception middleware, which is a royal pain in the tush.
So, this change does the hasattr check on both attributes, and only includes rawData if one of the `request.body` or `request.raw_post_data` attributes actually exists and is readable.<commit_after>from __future__ import absolute_import
from django.conf import settings
from raygun4py import raygunprovider
class Provider(object):
def __init__(self):
config = getattr(settings, 'RAYGUN4PY_CONFIG', {})
apiKey = getattr(settings, 'RAYGUN4PY_API_KEY', config.get('api_key', None))
self.sender = raygunprovider.RaygunSender(apiKey, config=config)
def process_exception(self, request, exception):
raygunRequest = self._mapRequest(request)
self.sender.send_exception(exception=exception, request=raygunRequest)
def _mapRequest(self, request):
headers = request.META.items()
_headers = dict()
for k, v in headers:
if not k.startswith('wsgi'):
_headers[k] = v
return {
'hostName': request.get_host(),
'url': request.path,
'httpMethod': request.method,
'ipAddress': request.META.get('REMOTE_ADDR', '?'),
'queryString': dict((key, request.GET[key]) for key in request.GET),
'form': dict((key, request.POST[key]) for key in request.POST),
'headers': _headers,
'rawData': request.body if hasattr(request, 'body') else getattr(request, 'raw_post_data', {})
}
|
229b8161f690154620faffd700335920648e1a96 | services/netflix.py | services/netflix.py | import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
| import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
https = False
signature_type = SIGNATURE_TYPE_QUERY
def get_authorize_params(self, redirect_uri):
params = super(Netflix, self).get_authorize_params(redirect_uri)
params['oauth_consumer_key'] = self.client_id
return params
| Fix token retrieval for Netflix | Fix token retrieval for Netflix
| Python | bsd-3-clause | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
Fix token retrieval for Netflix | import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
https = False
signature_type = SIGNATURE_TYPE_QUERY
def get_authorize_params(self, redirect_uri):
params = super(Netflix, self).get_authorize_params(redirect_uri)
params['oauth_consumer_key'] = self.client_id
return params
| <commit_before>import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
<commit_msg>Fix token retrieval for Netflix<commit_after> | import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
https = False
signature_type = SIGNATURE_TYPE_QUERY
def get_authorize_params(self, redirect_uri):
params = super(Netflix, self).get_authorize_params(redirect_uri)
params['oauth_consumer_key'] = self.client_id
return params
| import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
Fix token retrieval for Netfliximport foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
https = False
signature_type = SIGNATURE_TYPE_QUERY
def get_authorize_params(self, redirect_uri):
params = super(Netflix, self).get_authorize_params(redirect_uri)
params['oauth_consumer_key'] = self.client_id
return params
| <commit_before>import foauth.providers
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
<commit_msg>Fix token retrieval for Netflix<commit_after>import foauth.providers
from oauthlib.oauth1.rfc5849 import SIGNATURE_TYPE_QUERY
class Netflix(foauth.providers.OAuth1):
# General info about the provider
provider_url = 'https://www.netflix.com/'
docs_url = 'http://developer.netflix.com/docs'
# URLs to interact with the API
request_token_url = 'http://api.netflix.com/oauth/request_token'
authorize_url = 'https://api-user.netflix.com/oauth/login'
access_token_url = 'http://api.netflix.com/oauth/access_token'
api_domains = ['api-public.netflix.com', 'api.netflix.com']
available_permissions = [
(None, 'read and manage your queue'),
]
https = False
signature_type = SIGNATURE_TYPE_QUERY
def get_authorize_params(self, redirect_uri):
params = super(Netflix, self).get_authorize_params(redirect_uri)
params['oauth_consumer_key'] = self.client_id
return params
|
19de35d8124a67e459a69080156bd310bb3814ea | rate_delta_point.py | rate_delta_point.py | #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,4):
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
| #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,11):
try:
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
except IndexError:
print('NaN')
| Support all metrics for points. | Support all metrics for points.
| Python | mit | tdaede/awcy,tdaede/awcy,tdaede/awcy,tdaede/awcy,tdaede/awcy,tdaede/awcy | #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,4):
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
Support all metrics for points. | #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,11):
try:
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
except IndexError:
print('NaN')
| <commit_before>#!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,4):
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
<commit_msg>Support all metrics for points.<commit_after> | #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,11):
try:
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
except IndexError:
print('NaN')
| #!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,4):
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
Support all metrics for points.#!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,11):
try:
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
except IndexError:
print('NaN')
| <commit_before>#!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,4):
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
<commit_msg>Support all metrics for points.<commit_after>#!/usr/bin/env python3
from numpy import *
from scipy import *
from scipy.interpolate import interp1d
from scipy.interpolate import pchip
import sys
import os
import argparse
import json
a = flipud(loadtxt(sys.argv[1]));
b = flipud(loadtxt(sys.argv[2]));
for m in range(0,11):
try:
ya = a[:,3+m]
yb = b[:,3+m]
ra = a[:,2]*8./a[:,1]
rb = b[:,2]*8./b[:,1]
a_rate = pchip(ya, log(ra))(float(sys.argv[3]))
b_rate = pchip(yb, log(rb))(float(sys.argv[3]))
print(exp(b_rate - a_rate) - 1)
except IndexError:
print('NaN')
|
0ce9a29f83bb9c87df04f49b5e927d7a6aa4c53c | pdfminer/pdfcolor.py | pdfminer/pdfcolor.py |
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = {}
for (name, n) in six.iteritems({
'CalRGB': 3,
'CalGray': 1,
'Lab': 3,
'DeviceRGB': 3,
'DeviceCMYK': 4,
'DeviceGray': 1,
'Separation': 1,
'Indexed': 1,
'Pattern': 1,
}) :
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| Fix colorspace determinism with OrderedDict | Fix colorspace determinism with OrderedDict
| Python | mit | pdfminer/pdfminer.six,goulu/pdfminer |
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = {}
for (name, n) in six.iteritems({
'CalRGB': 3,
'CalGray': 1,
'Lab': 3,
'DeviceRGB': 3,
'DeviceCMYK': 4,
'DeviceGray': 1,
'Separation': 1,
'Indexed': 1,
'Pattern': 1,
}) :
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
Fix colorspace determinism with OrderedDict | import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| <commit_before>
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = {}
for (name, n) in six.iteritems({
'CalRGB': 3,
'CalGray': 1,
'Lab': 3,
'DeviceRGB': 3,
'DeviceCMYK': 4,
'DeviceGray': 1,
'Separation': 1,
'Indexed': 1,
'Pattern': 1,
}) :
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
<commit_msg>Fix colorspace determinism with OrderedDict<commit_after> | import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
|
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = {}
for (name, n) in six.iteritems({
'CalRGB': 3,
'CalGray': 1,
'Lab': 3,
'DeviceRGB': 3,
'DeviceCMYK': 4,
'DeviceGray': 1,
'Separation': 1,
'Indexed': 1,
'Pattern': 1,
}) :
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
Fix colorspace determinism with OrderedDictimport collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
| <commit_before>
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = {}
for (name, n) in six.iteritems({
'CalRGB': 3,
'CalGray': 1,
'Lab': 3,
'DeviceRGB': 3,
'DeviceCMYK': 4,
'DeviceGray': 1,
'Separation': 1,
'Indexed': 1,
'Pattern': 1,
}) :
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
<commit_msg>Fix colorspace determinism with OrderedDict<commit_after>import collections
from .psparser import LIT
import six #Python 2+3 compatibility
## PDFColorSpace
##
LITERAL_DEVICE_GRAY = LIT('DeviceGray')
LITERAL_DEVICE_RGB = LIT('DeviceRGB')
LITERAL_DEVICE_CMYK = LIT('DeviceCMYK')
class PDFColorSpace(object):
def __init__(self, name, ncomponents):
self.name = name
self.ncomponents = ncomponents
return
def __repr__(self):
return '<PDFColorSpace: %s, ncomponents=%d>' % (self.name, self.ncomponents)
PREDEFINED_COLORSPACE = collections.OrderedDict()
for (name, n) in [
('CalRGB', 3),
('CalGray', 1),
('Lab', 3),
('DeviceRGB', 3),
('DeviceCMYK', 4),
('DeviceGray', 1),
('Separation', 1),
('Indexed', 1),
('Pattern', 1),
]:
PREDEFINED_COLORSPACE[name]=PDFColorSpace(name, n)
|
fc2085e3c86e1596f5dc9c032e445887430602b5 | rotational-cipher/rotational_cipher.py | rotational-cipher/rotational_cipher.py | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| Use lambda function with method | Use lambda function with method
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with method | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| <commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after> | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
Use lambda function with methodimport string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| <commit_before>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
<commit_msg>Use lambda function with method<commit_after>import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
|
7a985d102a347e252b4e6be1776c6b6609fdcdea | pupa/core/default_settings.py | pupa/core/default_settings.py | MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
| MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_TIMEOUT = 2
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
| Add sensible defaults for ElasticSearch settings | Add sensible defaults for ElasticSearch settings
| Python | bsd-3-clause | opencivicdata/pupa,datamade/pupa,influence-usa/pupa,datamade/pupa,opencivicdata/pupa,mileswwatkins/pupa,mileswwatkins/pupa,influence-usa/pupa,rshorey/pupa,rshorey/pupa | MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
Add sensible defaults for ElasticSearch settings | MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_TIMEOUT = 2
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
| <commit_before>MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
<commit_msg>Add sensible defaults for ElasticSearch settings<commit_after> | MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_TIMEOUT = 2
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
| MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
Add sensible defaults for ElasticSearch settingsMONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_TIMEOUT = 2
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
| <commit_before>MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
<commit_msg>Add sensible defaults for ElasticSearch settings<commit_after>MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_DATABASE = 'pupa'
SCRAPELIB_RPM = 60
SCRAPELIB_TIMEOUT = 60
SCRAPELIB_RETRY_ATTEMPTS = 3
SCRAPELIB_RETRY_WAIT_SECONDS = 20
ENABLE_ELASTICSEARCH = False
ELASTICSEARCH_HOST = 'localhost'
ELASTICSEARCH_TIMEOUT = 2
BILL_FILTERS = {}
LEGISLATOR_FILTERS = {}
EVENT_FILTERS = {}
LOGGING_CONFIG = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': "%(asctime)s %(levelname)s %(name)s: %(message)s",
'datefmt': '%H:%M:%S'
}
},
'handlers': {
'default': {'level': 'DEBUG',
'class': 'pupa.ext.ansistrm.ColorizingStreamHandler',
'formatter': 'standard'},
},
'loggers': {
'': {
'handlers': ['default'], 'level': 'DEBUG', 'propagate': True
},
'scrapelib': {
'handlers': ['default'], 'level': 'INFO', 'propagate': False
},
'requests': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
'boto': {
'handlers': ['default'], 'level': 'WARN', 'propagate': False
},
},
}
|
8d18df92373c1fd4c2cfa2fb59f5f49a4f89b78f | djoauth2/conf.py | djoauth2/conf.py | # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
SSL_ONLY = True
| # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
REQUIRE_STATE = True
SSL_ONLY = True
| Add option for determining requirement of 'state' parameter. | Add option for determining requirement of 'state' parameter.
| Python | mit | vden/djoauth2-ng,Locu/djoauth2,seler/djoauth2,vden/djoauth2-ng,seler/djoauth2,Locu/djoauth2 | # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
SSL_ONLY = True
Add option for determining requirement of 'state' parameter. | # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
REQUIRE_STATE = True
SSL_ONLY = True
| <commit_before># coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
SSL_ONLY = True
<commit_msg>Add option for determining requirement of 'state' parameter.<commit_after> | # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
REQUIRE_STATE = True
SSL_ONLY = True
| # coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
SSL_ONLY = True
Add option for determining requirement of 'state' parameter.# coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
REQUIRE_STATE = True
SSL_ONLY = True
| <commit_before># coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
SSL_ONLY = True
<commit_msg>Add option for determining requirement of 'state' parameter.<commit_after># coding: utf-8
from django.config import settings
from appconf import AppConf
class DJOAuthConf(AppConf):
class Meta:
prefix = 'djoauth'
ACCESS_TOKEN_LENGTH = 30
ACCESS_TOKEN_LIFETIME = 3600
ACCESS_TOKENS_REFRESHABLE = True
AUTHORIZATION_CODE_LENGTH = 30
AUTHORIZATION_CODE_LIFETIME = 120
CLIENT_KEY_LENGTH = 30
CLIENT_SECRET_LENGTH = 30
REFRESH_TOKEN_LENGTH = 30
REALM = ''
REQUIRE_STATE = True
SSL_ONLY = True
|
1e91b4ad94dd4a986adce22350cec8bd24fa4865 | test/test_connection.py | test/test_connection.py | # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
| # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
from .test_stream import MockConnection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
def test_new_streams_use_new_stream_id(self):
conn = spdypy.SPDYConnection('www.google.com')
conn._sck = MockConnection()
stream_id = conn.putrequest(b'GET', b'/')
assert len(conn._streams) == 1
assert stream_id == 1
assert conn._streams[stream_id]
second_stream_id = conn.putrequest(b'POST', b'other')
assert len(conn._streams) == 2
assert second_stream_id == 3
assert conn._streams[second_stream_id]
| Test correctly incrementing stream IDs. | Test correctly incrementing stream IDs.
| Python | mit | Lukasa/spdypy | # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
Test correctly incrementing stream IDs. | # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
from .test_stream import MockConnection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
def test_new_streams_use_new_stream_id(self):
conn = spdypy.SPDYConnection('www.google.com')
conn._sck = MockConnection()
stream_id = conn.putrequest(b'GET', b'/')
assert len(conn._streams) == 1
assert stream_id == 1
assert conn._streams[stream_id]
second_stream_id = conn.putrequest(b'POST', b'other')
assert len(conn._streams) == 2
assert second_stream_id == 3
assert conn._streams[second_stream_id]
| <commit_before># -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
<commit_msg>Test correctly incrementing stream IDs.<commit_after> | # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
from .test_stream import MockConnection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
def test_new_streams_use_new_stream_id(self):
conn = spdypy.SPDYConnection('www.google.com')
conn._sck = MockConnection()
stream_id = conn.putrequest(b'GET', b'/')
assert len(conn._streams) == 1
assert stream_id == 1
assert conn._streams[stream_id]
second_stream_id = conn.putrequest(b'POST', b'other')
assert len(conn._streams) == 2
assert second_stream_id == 3
assert conn._streams[second_stream_id]
| # -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
Test correctly incrementing stream IDs.# -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
from .test_stream import MockConnection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
def test_new_streams_use_new_stream_id(self):
conn = spdypy.SPDYConnection('www.google.com')
conn._sck = MockConnection()
stream_id = conn.putrequest(b'GET', b'/')
assert len(conn._streams) == 1
assert stream_id == 1
assert conn._streams[stream_id]
second_stream_id = conn.putrequest(b'POST', b'other')
assert len(conn._streams) == 2
assert second_stream_id == 3
assert conn._streams[second_stream_id]
| <commit_before># -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
<commit_msg>Test correctly incrementing stream IDs.<commit_after># -*- coding: utf-8 -*-
"""
test/test_connection
~~~~~~~~~~~~~~~~~~~~~
Tests for the SPDYConnection object.
"""
import spdypy
import spdypy.connection
from .test_stream import MockConnection
class TestSPDYConnection(object):
def test_can_create_connection(self):
conn = spdypy.SPDYConnection(None)
assert conn
class TestSPDYConnectionState(object):
def test_connection_has_state(self):
conn = spdypy.SPDYConnection(None)
assert hasattr(conn, '_state')
def test_initial_connection_state_is_new(self):
conn = spdypy.SPDYConnection(None)
assert conn._state == spdypy.connection.NEW
def test_new_streams_use_new_stream_id(self):
conn = spdypy.SPDYConnection('www.google.com')
conn._sck = MockConnection()
stream_id = conn.putrequest(b'GET', b'/')
assert len(conn._streams) == 1
assert stream_id == 1
assert conn._streams[stream_id]
second_stream_id = conn.putrequest(b'POST', b'other')
assert len(conn._streams) == 2
assert second_stream_id == 3
assert conn._streams[second_stream_id]
|
6200b0655f56ecfe85095d695c4914e390520536 | preferences/forms.py | preferences/forms.py | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization__name='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] | Add maybe right connection for name | Add maybe right connection for name
| Python | mit | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state']Add maybe right connection for name | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization__name='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] | <commit_before>from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state']<commit_msg>Add maybe right connection for name<commit_after> | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization__name='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] | from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state']Add maybe right connection for namefrom django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization__name='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] | <commit_before>from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state']<commit_msg>Add maybe right connection for name<commit_after>from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
from opencivicdata.models.people_orgs import Person
class CustomModelFilter(forms.ModelChoiceField):
def label_from_instance(self, obj):
return "%s" % (obj.name)
class PreferencesForm(forms.ModelForm):
senator = CustomModelFilter(queryset=Person.objects.filter(memberships__organization__name='Florida Senate'))
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state'] |
73c72e3723c20019bb839ba7cd4494483fb65c0a | docs/conf.py | docs/conf.py | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2015, Tony Garnock-Jones, Gavin M. Roy, Pivotal and others.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
| # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2017, Tony Garnock-Jones, Gavin M. Roy, Pivotal Software, Inc and contributors.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
| Update (c) year and line | Update (c) year and line | Python | bsd-3-clause | pika/pika,vitaly-krugl/pika | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2015, Tony Garnock-Jones, Gavin M. Roy, Pivotal and others.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
Update (c) year and line | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2017, Tony Garnock-Jones, Gavin M. Roy, Pivotal Software, Inc and contributors.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
| <commit_before># -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2015, Tony Garnock-Jones, Gavin M. Roy, Pivotal and others.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
<commit_msg>Update (c) year and line<commit_after> | # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2017, Tony Garnock-Jones, Gavin M. Roy, Pivotal Software, Inc and contributors.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
| # -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2015, Tony Garnock-Jones, Gavin M. Roy, Pivotal and others.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
Update (c) year and line# -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2017, Tony Garnock-Jones, Gavin M. Roy, Pivotal Software, Inc and contributors.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
| <commit_before># -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2015, Tony Garnock-Jones, Gavin M. Roy, Pivotal and others.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
<commit_msg>Update (c) year and line<commit_after># -*- coding: utf-8 -*-
import sys
sys.path.insert(0, '../')
#needs_sphinx = '1.0'
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.intersphinx']
intersphinx_mapping = {'python': ('https://docs.python.org/3/',
'https://docs.python.org/3/objects.inv'),
'tornado': ('http://www.tornadoweb.org/en/stable/',
'http://www.tornadoweb.org/en/stable/objects.inv')}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = 'pika'
copyright = '2009-2017, Tony Garnock-Jones, Gavin M. Roy, Pivotal Software, Inc and contributors.'
import pika
release = pika.__version__
version = '.'.join(release.split('.')[0:1])
exclude_patterns = ['_build']
add_function_parentheses = True
add_module_names = True
show_authors = True
pygments_style = 'sphinx'
modindex_common_prefix = ['pika']
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'pikadoc'
|
3cfb623b16aa5e52f3617a8d250b58a74b30b65d | project_name/wsgi.py | project_name/wsgi.py | """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import project_name.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import {{ project_name }}.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| Revert "Should break travis integration" | Revert "Should break travis integration"
This reverts commit a0dc0168e103ec836e434019c34b345b51e16257.
| Python | mit | pinax/pinax-project-forums,pinax/pinax-project-forums | """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import project_name.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
Revert "Should break travis integration"
This reverts commit a0dc0168e103ec836e434019c34b345b51e16257. | """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import {{ project_name }}.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| <commit_before>"""
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import project_name.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
<commit_msg>Revert "Should break travis integration"
This reverts commit a0dc0168e103ec836e434019c34b345b51e16257.<commit_after> | """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import {{ project_name }}.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| """
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import project_name.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
Revert "Should break travis integration"
This reverts commit a0dc0168e103ec836e434019c34b345b51e16257."""
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import {{ project_name }}.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| <commit_before>"""
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import project_name.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
<commit_msg>Revert "Should break travis integration"
This reverts commit a0dc0168e103ec836e434019c34b345b51e16257.<commit_after>"""
WSGI config for {{ project_name }} project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings")
import {{ project_name }}.startup as startup
startup.run()
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
deb5775d0c8adad078ce5d0976f7f6f49963ca2e | accounts/features/steps/logout.py | accounts/features/steps/logout.py | from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
pass
| from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
    # Locate the link by its visible "Logout" text and follow it.
    logout_link = context.browser.find_link_by_text('Logout')
    logout_link.first.click()
@then('I am no longer authenticated')
def impl(context):
    # Try to visit my profile page, which requires authentication.
    context.browser.visit(context.server_url + 'accounts/profile/')
    # But find that we're redirected to the login page. Build the expected
    # URL from context.server_url (as the visit above does) instead of
    # hard-coding the host and port, so the step keeps working if the test
    # server's address or port ever changes.
    expected_url = context.server_url + 'accounts/login/?next=/accounts/profile/'
    assert context.browser.url == expected_url
| Test that the user is logged out | Test that the user is logged out
| Python | bsd-3-clause | f3r3nc/connect,nlhkabu/connect,nlhkabu/connect,f3r3nc/connect,f3r3nc/connect,nlhkabu/connect,f3r3nc/connect,nlhkabu/connect | from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
pass
Test that the user is logged out | from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
#Try to visit my profile page
context.browser.visit(context.server_url + 'accounts/profile/')
#But find that we're redirected to the login page
assert context.browser.url == 'http://localhost:8081/accounts/login/?next=/accounts/profile/'
| <commit_before>from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
pass
<commit_msg>Test that the user is logged out<commit_after> | from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
#Try to visit my profile page
context.browser.visit(context.server_url + 'accounts/profile/')
#But find that we're redirected to the login page
assert context.browser.url == 'http://localhost:8081/accounts/login/?next=/accounts/profile/'
| from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
pass
Test that the user is logged outfrom behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
#Try to visit my profile page
context.browser.visit(context.server_url + 'accounts/profile/')
#But find that we're redirected to the login page
assert context.browser.url == 'http://localhost:8081/accounts/login/?next=/accounts/profile/'
| <commit_before>from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
pass
<commit_msg>Test that the user is logged out<commit_after>from behave import *
# Unique to Scenario: User logs out
@when('I click on the logout link')
def impl(context):
context.browser.find_link_by_text('Logout').first.click()
@then('I am no longer authenticated')
def impl(context):
#Try to visit my profile page
context.browser.visit(context.server_url + 'accounts/profile/')
#But find that we're redirected to the login page
assert context.browser.url == 'http://localhost:8081/accounts/login/?next=/accounts/profile/'
|
dd50a2b3fb157eaa3add329d90de3641ef9ea1ec | flocker/docs/test/test_version_extensions.py | flocker/docs/test/test_version_extensions.py | from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
target = temp_dir.child('contents.html')
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
content = target.getContent()
expected = 'PRE-{}-POST'.format(get_installable_version(version))
self.assertIn(expected, content)
| from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
    """
    Tests for Sphinx version extensions.
    """
    def test_version_prompt(self):
        """
        The ``version-prompt`` directive replaces the placemarker
        ``|latest-installable|`` in a source file with the current
        installable version in the output file.
        """
        # Scratch directory holding both the RST input and the HTML output;
        # trial cleans it up after the test run.
        build_dir = FilePath(self.mktemp())
        build_dir.makedirs()
        rst_file = build_dir.child('contents.rst')
        rst_file.setContent(dedent('''
            .. version-prompt:: bash $
               $ PRE-|latest-installable|-POST
            '''))
        run_process([
            'sphinx-build', '-b', 'html',
            '-C', # don't look for config file, use -D flags instead
            '-D', 'extensions=flocker.docs.version_extensions',
            build_dir.path, # directory containing source/config files
            build_dir.path, # directory containing build files
            rst_file.path]) # source file to process
        # The directive must have substituted the placemarker in the
        # rendered HTML output.
        expected = 'PRE-{}-POST'.format(get_installable_version(version))
        self.assertIn(expected, build_dir.child('contents.html').getContent())
| Remove unneeded variable from test. | Remove unneeded variable from test.
| Python | apache-2.0 | w4ngyi/flocker,mbrukman/flocker,w4ngyi/flocker,w4ngyi/flocker,AndyHuu/flocker,hackday-profilers/flocker,hackday-profilers/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,mbrukman/flocker,mbrukman/flocker,AndyHuu/flocker,AndyHuu/flocker,wallnerryan/flocker-profiles,wallnerryan/flocker-profiles | from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
target = temp_dir.child('contents.html')
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
content = target.getContent()
expected = 'PRE-{}-POST'.format(get_installable_version(version))
self.assertIn(expected, content)
Remove unneeded variable from test. | from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
expected = 'PRE-{}-POST'.format(get_installable_version(version))
content = temp_dir.child('contents.html').getContent()
self.assertIn(expected, content)
| <commit_before>from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
target = temp_dir.child('contents.html')
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
content = target.getContent()
expected = 'PRE-{}-POST'.format(get_installable_version(version))
self.assertIn(expected, content)
<commit_msg>Remove unneeded variable from test.<commit_after> | from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
expected = 'PRE-{}-POST'.format(get_installable_version(version))
content = temp_dir.child('contents.html').getContent()
self.assertIn(expected, content)
| from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
target = temp_dir.child('contents.html')
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
content = target.getContent()
expected = 'PRE-{}-POST'.format(get_installable_version(version))
self.assertIn(expected, content)
Remove unneeded variable from test.from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
expected = 'PRE-{}-POST'.format(get_installable_version(version))
content = temp_dir.child('contents.html').getContent()
self.assertIn(expected, content)
| <commit_before>from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
target = temp_dir.child('contents.html')
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
content = target.getContent()
expected = 'PRE-{}-POST'.format(get_installable_version(version))
self.assertIn(expected, content)
<commit_msg>Remove unneeded variable from test.<commit_after>from textwrap import dedent
from twisted.python.filepath import FilePath
from twisted.trial.unittest import SynchronousTestCase
from flocker import __version__ as version
from flocker.common.version import get_installable_version
from flocker.testtools import run_process
class VersionExtensionsTest(SynchronousTestCase):
"""
Tests for Sphinx version extensions.
"""
def test_version_prompt(self):
"""
The ``version-prompt`` directive replaces the placemarker
``|latest-installable|`` in a source file with the current
installable version in the output file.
"""
temp_dir = FilePath(self.mktemp())
temp_dir.makedirs()
source_file = temp_dir.child('contents.rst')
source_file.setContent(dedent('''
.. version-prompt:: bash $
$ PRE-|latest-installable|-POST
'''))
run_process([
'sphinx-build', '-b', 'html',
'-C', # don't look for config file, use -D flags instead
'-D', 'extensions=flocker.docs.version_extensions',
temp_dir.path, # directory containing source/config files
temp_dir.path, # directory containing build files
source_file.path]) # source file to process
expected = 'PRE-{}-POST'.format(get_installable_version(version))
content = temp_dir.child('contents.html').getContent()
self.assertIn(expected, content)
|
6b38ca9283b3b367aa19a2722f9f6eea22c6c90b | cms_chunks/models.py | cms_chunks/models.py | from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
| from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
    """A CMS content chunk addressed by a comma-separated list of tags."""
    # Comma-separated tag names used to look this chunk up.
    tags = models.CharField(max_length=200)
    # django CMS placeholder storing the chunk's editable content.
    code = PlaceholderField('chunk_placeholder', related_name="chunks")
    # NOTE(review): priority's selection/ordering semantics live in
    # managers.ChunkManager — confirm there whether higher or lower wins.
    priority = models.IntegerField()
    objects = ChunkManager()

    def __unicode__(self):
        return "Tags: %s. Priority: %s" % (self.tags, self.priority)

    def has_tag(self, tag):
        """Return True if ``tag`` matches one of the comma-separated tags."""
        return tag in [part.strip() for part in str(self.tags).split(",")]
# Create your models here.
| Add useful method to know if a chunk has a tag | Add useful method to know if a chunk has a tag
| Python | mit | devartis/django-cms-chunks | from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
Add useful method to know if a chunk has a tag | from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def __unicode__(self):
return "Tags: %s. Priority: %s" % (self.tags, self.priority)
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
| <commit_before>from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
<commit_msg>Add useful method to know if a chunk has a tag<commit_after> | from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def __unicode__(self):
return "Tags: %s. Priority: %s" % (self.tags, self.priority)
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
| from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
Add useful method to know if a chunk has a tagfrom django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def __unicode__(self):
return "Tags: %s. Priority: %s" % (self.tags, self.priority)
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
| <commit_before>from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
<commit_msg>Add useful method to know if a chunk has a tag<commit_after>from django.db import models
from cms.models.fields import PlaceholderField
from managers import ChunkManager
class Chunk(models.Model):
tags = models.CharField(max_length=200)
code = PlaceholderField('chunk_placeholder', related_name="chunks")
priority = models.IntegerField()
objects = ChunkManager()
def __unicode__(self):
return "Tags: %s. Priority: %s" % (self.tags, self.priority)
def has_tag(self, tag):
tags = [string.strip() for string in str(self.tags).split(",")]
return tag in tags
# Create your models here.
|
971a180a5afab6ee53a9b15413341c649cda0a1c | dtags/config.py | dtags/config.py | import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
tag_data = {} if not json_str else json.loads(json_str)
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
| import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
if not json_str:
return {}
tag_data = json.loads(json_str)
if not tag_data:
return {}
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
| Fix a bug in load_tags function | Fix a bug in load_tags function
| Python | mit | joowani/dtags | import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
tag_data = {} if not json_str else json.loads(json_str)
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
Fix a bug in load_tags function | import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
if not json_str:
return {}
tag_data = json.loads(json_str)
if not tag_data:
return {}
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
| <commit_before>import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
tag_data = {} if not json_str else json.loads(json_str)
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
<commit_msg>Fix a bug in load_tags function<commit_after> | import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
if not json_str:
return {}
tag_data = json.loads(json_str)
if not tag_data:
return {}
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
| import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
tag_data = {} if not json_str else json.loads(json_str)
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
Fix a bug in load_tags functionimport os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
if not json_str:
return {}
tag_data = json.loads(json_str)
if not tag_data:
return {}
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
| <commit_before>import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
tag_data = {} if not json_str else json.loads(json_str)
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
<commit_msg>Fix a bug in load_tags function<commit_after>import os
import json
from dtags.utils import halt, expand_path
TAGS = os.path.expanduser('~/.dtags')
def load_tags():
"""Load the tags from disk."""
if not os.path.exists(TAGS):
try:
with open(TAGS, "w") as config_file:
json.dump({}, config_file)
except (IOError, OSError) as e:
halt("Failed to init {}: {}".format(TAGS, e.strerror), e.errno)
else:
try:
with open(TAGS, "r") as config_file:
json_str = config_file.read().strip()
if not json_str:
return {}
tag_data = json.loads(json_str)
if not tag_data:
return {}
return {
tag: {expand_path(path): path for path in paths}
for tag, paths in tag_data.items()
}
except ValueError as e:
halt("Failed to load {}: {}".format(TAGS, e))
except (IOError, OSError) as e:
halt("Failed to load {}: {}".format(TAGS, e.strerror), e.errno)
def save_tags(tags):
"""Save the tags to disk.
:param tags: tags to save
"""
try:
with open(TAGS, "w") as config_file:
json.dump(
{tag: sorted(paths.values()) for tag, paths in tags.items()},
config_file,
indent=4,
sort_keys=True
)
except IOError as e:
halt("Failed to save {}: {}".format(TAGS, e.strerror))
|
10e37d95dde00cd02d91998662a22f555837e877 | hp3parclient/__init__.py | hp3parclient/__init__.py | # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 0)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
| # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 1)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
| Update the version to 3.1.1 | Update the version to 3.1.1
Change-Id: Ia24c8a9e62330243fa7413a649f885c2a40dd4fb
| Python | apache-2.0 | hp-storage/python-3parclient,hpe-storage/python-3parclient | # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 0)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
Update the version to 3.1.1
Change-Id: Ia24c8a9e62330243fa7413a649f885c2a40dd4fb | # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 1)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
| <commit_before># Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 0)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
<commit_msg>Update the version to 3.1.1
Change-Id: Ia24c8a9e62330243fa7413a649f885c2a40dd4fb<commit_after> | # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 1)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
| # Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 0)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
Update the version to 3.1.1
Change-Id: Ia24c8a9e62330243fa7413a649f885c2a40dd4fb# Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 1)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
| <commit_before># Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 0)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
<commit_msg>Update the version to 3.1.1
Change-Id: Ia24c8a9e62330243fa7413a649f885c2a40dd4fb<commit_after># Copyright 2012-2014 Hewlett Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
HP 3PAR Client.
:Author: Walter A. Boring IV
:Author: Kurt Martin
:Copyright: Copyright 2012-2014, Hewlett Packard Development Company, L.P.
:License: Apache v2.0
"""
version_tuple = (3, 1, 1)
def get_version_string():
"""Current version of HP3PARClient."""
if isinstance(version_tuple[-1], str):
return '.'.join(map(str, version_tuple[:-1])) + version_tuple[-1]
return '.'.join(map(str, version_tuple))
version = get_version_string()
|
9f95715cc7260d02d88781c208f6a6a167496015 | aiohttp_json_api/jsonpointer/__init__.py | aiohttp_json_api/jsonpointer/__init__.py | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| Fix bug with JSONPointer if part passed via __truediv__ is integer | Fix bug with JSONPointer if part passed via __truediv__ is integer
| Python | mit | vovanbo/aiohttp_json_api | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
Fix bug with JSONPointer if part passed via __truediv__ is integer | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| <commit_before>"""
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
<commit_msg>Fix bug with JSONPointer if part passed via __truediv__ is integer<commit_after> | """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| """
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
Fix bug with JSONPointer if part passed via __truediv__ is integer"""
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
| <commit_before>"""
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
<commit_msg>Fix bug with JSONPointer if part passed via __truediv__ is integer<commit_after>"""
Extended JSONPointer from python-json-pointer_
==============================================
.. _python-json-pointer: https://github.com/stefankoegl/python-json-pointer
"""
import typing
from jsonpointer import JsonPointer as BaseJsonPointer
class JSONPointer(BaseJsonPointer):
def __init__(self, pointer):
super(JSONPointer, self).__init__(pointer)
def __truediv__(self,
path: typing.Union['JSONPointer', str]) -> 'JSONPointer':
parts = self.parts.copy()
if isinstance(path, int):
path = str(path)
if isinstance(path, str):
if not path.startswith('/'):
path = f'/{path}'
new_parts = JSONPointer(path).parts.pop(0)
parts.append(new_parts)
else:
new_parts = path.parts
parts.extend(new_parts)
return JSONPointer.from_parts(parts)
|
c60060f08700bfdbbcb566927624e2ec3af53145 | slash_bot/errors.py | slash_bot/errors.py | # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
| # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
| Add missing newline for PEP8. | Add missing newline for PEP8.
| Python | mit | naoey/slash-bot,naoey/slash-bot | # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
Add missing newline for PEP8. | # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
| <commit_before># coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
<commit_msg>Add missing newline for PEP8.<commit_after> | # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
| # coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
Add missing newline for PEP8.# coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
| <commit_before># coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
<commit_msg>Add missing newline for PEP8.<commit_after># coding: utf-8
"""
Created on 2016-08-23
@author: naoey
"""
class SlashBotError(Exception):
pass
class ConfigError(SlashBotError):
def __init__(self, config_attr=None):
if config_attr:
super().init("Missing/invalid config for {}".format(config_attr))
else:
super().init()
class SlashBotValueError(SlashBotError):
pass
class CommandFormatError(SlashBotError):
pass
class ThirdPartyAPIError(SlashBotError):
pass
class AssetsError(SlashBotError):
pass
class CommandDefinitionError(SlashBotError):
pass
class BotPermissionError(SlashBotError):
pass
|
fb61c3c64d2426e4e7a6e454cbf57b15e003ce66 | etl/__init__.py | etl/__init__.py | from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
| from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
CSV_ETL_CLASSES = [
('Clubs', ClubIngest),
('Competitions', CompetitionIngest),
('Players', PlayerIngest),
('Acquisitions', AcquisitionIngest),
('Salaries', PlayerSalaryIngest),
('Partials', PartialTenureIngest),
('FieldStats', FieldStatIngest),
('GkStats', GoalkeeperStatIngest),
('LeaguePoints', LeaguePointIngest)
]
| Add list of entities and ETL classes for CSV files | Add list of entities and ETL classes for CSV files
| Python | mit | soccermetrics/marcotti-mls | from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
Add list of entities and ETL classes for CSV files | from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
CSV_ETL_CLASSES = [
('Clubs', ClubIngest),
('Competitions', CompetitionIngest),
('Players', PlayerIngest),
('Acquisitions', AcquisitionIngest),
('Salaries', PlayerSalaryIngest),
('Partials', PartialTenureIngest),
('FieldStats', FieldStatIngest),
('GkStats', GoalkeeperStatIngest),
('LeaguePoints', LeaguePointIngest)
]
| <commit_before>from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
<commit_msg>Add list of entities and ETL classes for CSV files<commit_after> | from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
CSV_ETL_CLASSES = [
('Clubs', ClubIngest),
('Competitions', CompetitionIngest),
('Players', PlayerIngest),
('Acquisitions', AcquisitionIngest),
('Salaries', PlayerSalaryIngest),
('Partials', PartialTenureIngest),
('FieldStats', FieldStatIngest),
('GkStats', GoalkeeperStatIngest),
('LeaguePoints', LeaguePointIngest)
]
| from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
Add list of entities and ETL classes for CSV filesfrom base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
CSV_ETL_CLASSES = [
('Clubs', ClubIngest),
('Competitions', CompetitionIngest),
('Players', PlayerIngest),
('Acquisitions', AcquisitionIngest),
('Salaries', PlayerSalaryIngest),
('Partials', PartialTenureIngest),
('FieldStats', FieldStatIngest),
('GkStats', GoalkeeperStatIngest),
('LeaguePoints', LeaguePointIngest)
]
| <commit_before>from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
<commit_msg>Add list of entities and ETL classes for CSV files<commit_after>from base import BaseCSV, get_local_handles, ingest_feeds, create_seasons
from overview import (ClubIngest, CountryIngest, CompetitionIngest, PlayerIngest, PersonIngest)
from financial import (AcquisitionIngest, PlayerSalaryIngest, PartialTenureIngest)
from statistics import (FieldStatIngest, GoalkeeperStatIngest, LeaguePointIngest)
CSV_ETL_CLASSES = [
('Clubs', ClubIngest),
('Competitions', CompetitionIngest),
('Players', PlayerIngest),
('Acquisitions', AcquisitionIngest),
('Salaries', PlayerSalaryIngest),
('Partials', PartialTenureIngest),
('FieldStats', FieldStatIngest),
('GkStats', GoalkeeperStatIngest),
('LeaguePoints', LeaguePointIngest)
]
|
0da19042c74d2a85ef4652b36186a1ee6c4fc247 | tilequeue/format/mvt.py | tilequeue/format/mvt.py | from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid)
fp.write(tile)
| from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(
feature_layers,
quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid,
round_fn=round,
)
fp.write(tile)
| Use round_fn to specify built-in round function | Use round_fn to specify built-in round function
| Python | mit | mapzen/tilequeue,tilezen/tilequeue | from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid)
fp.write(tile)
Use round_fn to specify built-in round function | from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(
feature_layers,
quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid,
round_fn=round,
)
fp.write(tile)
| <commit_before>from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid)
fp.write(tile)
<commit_msg>Use round_fn to specify built-in round function<commit_after> | from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(
feature_layers,
quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid,
round_fn=round,
)
fp.write(tile)
| from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid)
fp.write(tile)
Use round_fn to specify built-in round functionfrom mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(
feature_layers,
quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid,
round_fn=round,
)
fp.write(tile)
| <commit_before>from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid)
fp.write(tile)
<commit_msg>Use round_fn to specify built-in round function<commit_after>from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
from mapbox_vector_tile import encode as mvt_encode
def encode(fp, feature_layers, coord, bounds_merc):
tile = mvt_encode(
feature_layers,
quantize_bounds=bounds_merc,
on_invalid_geometry=on_invalid_geometry_make_valid,
round_fn=round,
)
fp.write(tile)
|
464bc1b511415459e99700b94101776d00b23796 | indra/pre_assemble_for_db/pre_assemble_script.py | indra/pre_assemble_for_db/pre_assemble_script.py | import indra.tools.assemble_corpus as ac
def process_statements(stmts):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False)
return stmts
| import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts
def process_statements(stmts, num_procs=1):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False,
poolsize=num_procs)
return stmts
def preassemble_db_stmts(db, num_procs, *clauses):
"""Run pre-assembly on a set of statements in the database."""
stmts = get_statements(clauses, db=db, do_stmt_count=False)
pa_stmts = process_statements(stmts, num_procs)
insert_pa_stmts(db, pa_stmts)
return pa_stmts
| Create function to handle full pipeline. | Create function to handle full pipeline.
| Python | bsd-2-clause | bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,pvtodorov/indra | import indra.tools.assemble_corpus as ac
def process_statements(stmts):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False)
return stmts
Create function to handle full pipeline. | import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts
def process_statements(stmts, num_procs=1):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False,
poolsize=num_procs)
return stmts
def preassemble_db_stmts(db, num_procs, *clauses):
"""Run pre-assembly on a set of statements in the database."""
stmts = get_statements(clauses, db=db, do_stmt_count=False)
pa_stmts = process_statements(stmts, num_procs)
insert_pa_stmts(db, pa_stmts)
return pa_stmts
| <commit_before>import indra.tools.assemble_corpus as ac
def process_statements(stmts):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False)
return stmts
<commit_msg>Create function to handle full pipeline.<commit_after> | import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts
def process_statements(stmts, num_procs=1):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False,
poolsize=num_procs)
return stmts
def preassemble_db_stmts(db, num_procs, *clauses):
"""Run pre-assembly on a set of statements in the database."""
stmts = get_statements(clauses, db=db, do_stmt_count=False)
pa_stmts = process_statements(stmts, num_procs)
insert_pa_stmts(db, pa_stmts)
return pa_stmts
| import indra.tools.assemble_corpus as ac
def process_statements(stmts):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False)
return stmts
Create function to handle full pipeline.import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts
def process_statements(stmts, num_procs=1):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False,
poolsize=num_procs)
return stmts
def preassemble_db_stmts(db, num_procs, *clauses):
"""Run pre-assembly on a set of statements in the database."""
stmts = get_statements(clauses, db=db, do_stmt_count=False)
pa_stmts = process_statements(stmts, num_procs)
insert_pa_stmts(db, pa_stmts)
return pa_stmts
| <commit_before>import indra.tools.assemble_corpus as ac
def process_statements(stmts):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False)
return stmts
<commit_msg>Create function to handle full pipeline.<commit_after>import indra.tools.assemble_corpus as ac
from indra.db.util import get_statements, insert_pa_stmts
def process_statements(stmts, num_procs=1):
stmts = ac.map_grounding(stmts)
stmts = ac.map_sequence(stmts)
stmts = ac.run_preassembly(stmts, return_toplevel=False,
poolsize=num_procs)
return stmts
def preassemble_db_stmts(db, num_procs, *clauses):
"""Run pre-assembly on a set of statements in the database."""
stmts = get_statements(clauses, db=db, do_stmt_count=False)
pa_stmts = process_statements(stmts, num_procs)
insert_pa_stmts(db, pa_stmts)
return pa_stmts
|
60430260f0bec7b9231c2dcb3ed3394dd81442b2 | fbmsgbot/bot.py | fbmsgbot/bot.py | from http_client import HttpClient
"""
@breif Facebook messenger bot
"""
class Bot():
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
def handle_incoming():
raise NotImplementedError
| from http_client import HttpClient
class Bot():
"""
@breif Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
| Remove ability to recieve messages | Remove ability to recieve messages
| Python | mit | ben-cunningham/python-messenger-bot,ben-cunningham/pybot | from http_client import HttpClient
"""
@breif Facebook messenger bot
"""
class Bot():
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
def handle_incoming():
raise NotImplementedError
Remove ability to recieve messages | from http_client import HttpClient
class Bot():
"""
@breif Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
| <commit_before>from http_client import HttpClient
"""
@breif Facebook messenger bot
"""
class Bot():
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
def handle_incoming():
raise NotImplementedError
<commit_msg>Remove ability to recieve messages<commit_after> | from http_client import HttpClient
class Bot():
"""
@breif Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
| from http_client import HttpClient
"""
@breif Facebook messenger bot
"""
class Bot():
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
def handle_incoming():
raise NotImplementedError
Remove ability to recieve messagesfrom http_client import HttpClient
class Bot():
"""
@breif Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
| <commit_before>from http_client import HttpClient
"""
@breif Facebook messenger bot
"""
class Bot():
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
def handle_incoming():
raise NotImplementedError
<commit_msg>Remove ability to recieve messages<commit_after>from http_client import HttpClient
class Bot():
"""
@breif Facebook messenger bot
"""
def __init__(self, token):
self.api_token = token
self.client = HttpClient()
def send_message(self, message, completion):
def completion(response, error):
if error is None:
# TODO: Is there anything the bot needs to do?
# maybe retry if it fails...?
pass
else:
completion()
self.client.submit_request('/me/messages',
'POST',
message.to_json(),
completion)
|
f54e7d2da0ba321bdd5900c9893f6fe76adad12f | telegramschoolbot/database.py | telegramschoolbot/database.py | """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session = getattr(threadLocal, "session", None)
if session is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session = scoped_session(session_factory)
return threadLocal.session()
| """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session_factory = getattr(threadLocal, "session_factory", None)
if session_factory is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session_factory = session_factory
return session_factory()
| Put the session factory in threadLocal, not the session | Put the session factory in threadLocal, not the session
| Python | mit | paolobarbolini/TelegramSchoolBot | """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session = getattr(threadLocal, "session", None)
if session is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session = scoped_session(session_factory)
return threadLocal.session()
Put the session factory in threadLocal, not the session | """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session_factory = getattr(threadLocal, "session_factory", None)
if session_factory is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session_factory = session_factory
return session_factory()
| <commit_before>"""
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session = getattr(threadLocal, "session", None)
if session is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session = scoped_session(session_factory)
return threadLocal.session()
<commit_msg>Put the session factory in threadLocal, not the session<commit_after> | """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session_factory = getattr(threadLocal, "session_factory", None)
if session_factory is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session_factory = session_factory
return session_factory()
| """
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session = getattr(threadLocal, "session", None)
if session is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session = scoped_session(session_factory)
return threadLocal.session()
Put the session factory in threadLocal, not the session"""
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session_factory = getattr(threadLocal, "session_factory", None)
if session_factory is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session_factory = session_factory
return session_factory()
| <commit_before>"""
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session = getattr(threadLocal, "session", None)
if session is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session = scoped_session(session_factory)
return threadLocal.session()
<commit_msg>Put the session factory in threadLocal, not the session<commit_after>"""
Interact with your school website with telegram!
Copyright (c) 2016-2017 Paolo Barbolini <paolo@paolo565.org>
Released under the MIT license
"""
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, scoped_session
import threading
# Temporary logging
"""
import logging
logging.basicConfig()
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
"""
threadLocal = threading.local()
class Database:
def __init__(self, config):
self.config = config
def Session(self):
engine = getattr(threadLocal, "engine", None)
if engine is None:
threadLocal.engine = create_engine(self.config["database_url"])
session_factory = getattr(threadLocal, "session_factory", None)
if session_factory is None:
session_factory = sessionmaker(bind=threadLocal.engine)
threadLocal.session_factory = session_factory
return session_factory()
|
08522cc9c14dca4ea18cd96bf47a43e2f1285248 | kai/controllers/tracs.py | kai/controllers/tracs.py | import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = c.user.timezone
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
| import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
from trac.util.datefmt import _tzoffsetmap
from pytz import timezone
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = _tzoffsetmap.get(c._tzinfo.utcoffset(None))
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
| Add proper timezone data for trac | Add proper timezone data for trac
| Python | bsd-3-clause | Pylons/kai,Pylons/kai | import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = c.user.timezone
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
Add proper timezone data for trac | import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
from trac.util.datefmt import _tzoffsetmap
from pytz import timezone
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = _tzoffsetmap.get(c._tzinfo.utcoffset(None))
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
| <commit_before>import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = c.user.timezone
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
<commit_msg>Add proper timezone data for trac<commit_after> | import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
from trac.util.datefmt import _tzoffsetmap
from pytz import timezone
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = _tzoffsetmap.get(c._tzinfo.utcoffset(None))
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
| import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = c.user.timezone
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
Add proper timezone data for tracimport logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
from trac.util.datefmt import _tzoffsetmap
from pytz import timezone
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = _tzoffsetmap.get(c._tzinfo.utcoffset(None))
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
| <commit_before>import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = c.user.timezone
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
<commit_msg>Add proper timezone data for trac<commit_after>import logging
from pylons import response, config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
from trac.web import HTTPException
from trac.util.datefmt import _tzoffsetmap
from pytz import timezone
except:
pass
from kai.lib.base import BaseController, render
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
environ['REMOTE_EMAIL'] = c.user.email
environ['REMOTE_TZ'] = _tzoffsetmap.get(c._tzinfo.utcoffset(None))
environ['REMOTE_ID']= c.user.id
try:
return trac_app(environ, start_response)
except HTTPException, obj:
response.status_int = obj.code
response.write(obj.message)
return response(environ, start_response)
|
8964861c877797de7932978357ebe3c35eec6715 | qtui/master_page.py | qtui/master_page.py | from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
| from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
#TODO: Save current question on close
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
| Add a TODO for the Wizard | Add a TODO for the Wizard
| Python | mit | matcom/autoexam,matcom/autoexam,matcom/autoexam,matcom/autoexam,matcom/autoexam | from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
Add a TODO for the Wizard | from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
#TODO: Save current question on close
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
| <commit_before>from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
<commit_msg>Add a TODO for the Wizard<commit_after> | from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
#TODO: Save current question on close
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
| from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
Add a TODO for the Wizardfrom PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
#TODO: Save current question on close
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
| <commit_before>from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
<commit_msg>Add a TODO for the Wizard<commit_after>from PyQt4.QtGui import *
from PyQt4 import uic
import os
from os.path import join
import api
#TODO: Save current question on close
class MasterPage(QWizardPage):
path = "qtui/ui/page1_master.ui"
def __init__(self, project, parentW=None):
super(MasterPage, self).__init__()
self.ui = uic.loadUi(join(os.environ['AUTOEXAM_FOLDER'], self.path), self)
self.project = project
self.ui.questionWidget.initializeProject(project)
# self.ui.masterGenBtn.clicked.connect(self.gen_master)
self.parentWizard = parentW
def validatePage(self):
try:
tags, questions = self.ui.questionWidget.dump()
self.project.tags = tags
self.project.questions = questions
api.validate_project(self.project)
# TODO: Do at least one of the following:
# 1. Set the project total_questions_per_exam number before generating
# the master here (with a dialog or a modified ui)
# 2. Get the total_questions_per_exam number out of the master and into
# gen.py as a parameter (just like test_count)
# Uncomment when one of the above is done.
# master_data = api.render_master(self.project, TEMPLATE_PATH)
# api.save_master(master_data)
return True
except Exception as e:
self.diag = QMessageBox(QMessageBox.Warning, "Warning", str(e))
self.diag.show()
return False
|
08472dce69bb861f72684037c912625cf70546c1 | kibitzr/fetcher/shell.py | kibitzr/fetcher/shell.py | import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
return ok, stdout
| import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
if ok:
report = stdout
else:
report = u'\n'.join([stdout, stderr])
return ok, report
| Return stderr on script failure | Return stderr on script failure
| Python | mit | kibitzr/kibitzr,kibitzr/kibitzr | import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
return ok, stdout
Return stderr on script failure | import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
if ok:
report = stdout
else:
report = u'\n'.join([stdout, stderr])
return ok, report
| <commit_before>import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
return ok, stdout
<commit_msg>Return stderr on script failure<commit_after> | import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
if ok:
report = stdout
else:
report = u'\n'.join([stdout, stderr])
return ok, report
| import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
return ok, stdout
Return stderr on script failureimport sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
if ok:
report = stdout
else:
report = u'\n'.join([stdout, stderr])
return ok, report
| <commit_before>import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
return ok, stdout
<commit_msg>Return stderr on script failure<commit_after>import sh
import tempfile
import logging
logger = logging.getLogger(__name__)
def fetch_bash(conf, **kwargs):
code = conf['script']
logger.info("Executing bash fetcher")
logger.debug(code)
with tempfile.NamedTemporaryFile() as fp:
logger.debug("Saving code to %r", fp.name)
fp.write(code.encode('utf-8'))
fp.flush()
logger.debug("Launching script %r", fp.name)
result = sh.bash(fp.name)
stdout = result.stdout.decode('utf-8')
stderr = result.stderr.decode('utf-8')
logger.debug("Bash exit_code: %r", result.exit_code)
logger.debug("Bash stdout: %s", stdout)
logger.debug("Bash stderr: %s", stderr)
ok = (result.exit_code == 0)
if ok:
report = stdout
else:
report = u'\n'.join([stdout, stderr])
return ok, report
|
e11b3c344b52c84b5e86bdc381df2f359fe63dae | fparser/setup.py | fparser/setup.py |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
config.add_data_files('log.config')
return config
| Add log.config to data files to fix installed fparser. | Add log.config to data files to fix installed fparser.
| Python | bsd-3-clause | dagss/f2py-g3,dagss/f2py-g3 |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
Add log.config to data files to fix installed fparser. |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
config.add_data_files('log.config')
return config
| <commit_before>
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
<commit_msg>Add log.config to data files to fix installed fparser.<commit_after> |
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
config.add_data_files('log.config')
return config
|
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
Add log.config to data files to fix installed fparser.
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
config.add_data_files('log.config')
return config
| <commit_before>
def configuration(parent_package='',top_path=None):
from numpy.distutils.misc_util import Configuration
config = Configuration('fparser',parent_package,top_path)
return config
<commit_msg>Add log.config to data files to fix installed fparser.<commit_after>
def configuration(parent_package='', top_path=None):
    """Build the numpy.distutils configuration for the fparser package."""
    from numpy.distutils.misc_util import Configuration
    cfg = Configuration('fparser', parent_package, top_path)
    # Ship log.config with the package so an installed fparser can load it.
    cfg.add_data_files('log.config')
    return cfg
|
9e41011a5f164732ffd33ba5ca5edc7813735aeb | bundle_data.py | bundle_data.py | #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
| Fix saving when number of items is less than configured bundle size | Fix saving when number of items is less than configured bundle size
| Python | apache-2.0 | baudm/HomographyNet | #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
Fix saving when number of items is less than configured bundle size | #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
    """Bundle the accumulated samples into one uniquely-named .npz archive.

    :param b: destination directory for the bundle.
    :param x: list of image arrays, stacked along a new leading axis.
    :param y: list of offset arrays, stacked along a new leading axis.
    """
    bundle_path = os.path.join(b, str(uuid.uuid4()) + '.npz')
    with open(bundle_path, 'wb') as fh:
        np.savez(fh, images=np.stack(x), offsets=np.stack(y))
    print('packed:', bundle_path)
def main():
    """CLI entry point: gather *.dat samples from input dirs into .npz bundles."""
    if len(sys.argv) < 4:
        print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
        exit(1)
    out_dir = sys.argv[1]
    bundle_size = int(sys.argv[2])
    image_batch = []
    offset_batch = []
    for input_dir in sys.argv[3:]:
        for sample_path in glob.glob(os.path.join(input_dir, '*.dat')):
            with open(sample_path, 'rb') as fh:
                image, offsets = pickle.load(fh)
            image_batch.append(image)
            offset_batch.append(offsets)
            if len(offset_batch) >= bundle_size:
                pack(out_dir, image_batch, offset_batch)
                image_batch = []
                offset_batch = []
    # Flush any remaining samples that didn't fill a complete bundle.
    if image_batch:
        pack(out_dir, image_batch, offset_batch)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
<commit_msg>Fix saving when number of items is less than configured bundle size<commit_after> | #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
Fix saving when number of items is less than configured bundle size#!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def main():
if len(sys.argv) != 4:
print('Usage: bundle_data.py <input dir> <output dir> <samples per bundle>')
exit(1)
p = sys.argv[1]
b = sys.argv[2]
lim = int(sys.argv[3])
x = []
y = []
for d in glob.glob(os.path.join(p, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
x = []
y = []
if __name__ == '__main__':
main()
<commit_msg>Fix saving when number of items is less than configured bundle size<commit_after>#!/usr/bin/env python
import pickle
import os.path
import glob
import uuid
import sys
import os.path
import numpy as np
def pack(b, x, y):
name = str(uuid.uuid4())
pack = os.path.join(b, name + '.npz')
with open(pack, 'wb') as f:
np.savez(f, images=np.stack(x), offsets=np.stack(y))
print('packed:', pack)
def main():
if len(sys.argv) < 4:
print('Usage: bundle_data.py <output dir> <samples per bundle> <input dir1> [input dir2] ...')
exit(1)
o = sys.argv[1]
lim = int(sys.argv[2])
inputs = sys.argv[3:]
x = []
y = []
for i in inputs:
for d in glob.glob(os.path.join(i, '*.dat')):
with open(d, 'rb') as f:
im, l = pickle.load(f)
x.append(im)
y.append(l)
if len(y) >= lim:
pack(o, x, y)
x = []
y = []
# Pack any leftovers
if x:
pack(o, x, y)
if __name__ == '__main__':
main()
|
1bf6211f2fd5aef99e529fdc0e714b1a36ace346 | gallery/util.py | gallery/util.py | import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi'
]
| import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi',
'cr2'
]
| Add CR2 to allowed files | Add CR2 to allowed files
| Python | mit | liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery | import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi'
]
Add CR2 to allowed files | import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi',
'cr2'
]
| <commit_before>import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi'
]
<commit_msg>Add CR2 to allowed files<commit_after> | import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi',
'cr2'
]
| import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi'
]
Add CR2 to allowed filesimport os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi',
'cr2'
]
| <commit_before>import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
print(File.query.filter(File.parent == dir_id).all())
return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict():
path = os.path.normpath("/gallery-data/root")
file_tree = Dict()
for root, _, files in os.walk(path, topdown=True):
path = root.split('/')
path.pop(0)
file_tree_fd = file_tree
for part in path:
file_tree_fd = file_tree_fd[part]
file_tree_fd['.'] = files
return file_tree
def convert_bytes_to_utf8(dic):
for key in dic:
if isinstance(key, bytes):
k = key.decode('utf-8')
v = dic[key]
del dic[key]
dic[k] = v
if isinstance(dic[key], bytes):
v = dic[key].decode('utf-8')
dic[key] = v
return dic
def allowed_file(filename):
return '.' in filename and filename.lower().rsplit('.', 1)[1] in \
[
'txt',
'png',
'jpg',
'jpeg',
'mpg',
'mp4',
'avi'
]
<commit_msg>Add CR2 to allowed files<commit_after>import os
from addict import Dict
from gallery.models import File
def get_dir_file_contents(dir_id):
    """Return all File rows whose parent directory is *dir_id*.

    The original executed the query twice — once for a leftover debug
    ``print`` and once for the return value; evaluate it a single time.
    """
    return File.query.filter(File.parent == dir_id).all()
def get_dir_tree_dict(base_path="/gallery-data/root"):
    """Walk *base_path* and return a nested Dict mirroring the directory tree.

    Each directory node maps subdirectory names to nested dicts; the special
    key '.' holds the list of file names directly inside that directory.

    :param base_path: root directory to walk; defaults to the gallery root,
        keeping the original zero-argument call working unchanged.
    """
    base = os.path.normpath(base_path)
    file_tree = Dict()
    for root, _, files in os.walk(base, topdown=True):
        parts = root.split('/')
        # Drop the leading component, as the original did — for an absolute
        # path this is the empty string before the first '/'.
        # NOTE(review): assumes a POSIX-style absolute path; confirm callers
        # never pass a relative or Windows path.
        parts.pop(0)
        node = file_tree
        for part in parts:
            node = node[part]  # addict.Dict auto-creates missing children
        node['.'] = files
    return file_tree
def convert_bytes_to_utf8(dic):
    """Decode bytes keys and bytes values of *dic* to UTF-8 str, in place.

    Iterates over a snapshot of the items so the dict can be mutated
    safely: the original deleted keys while iterating the live dict
    (a RuntimeError on Python 3) and then indexed the just-deleted key
    (a KeyError).

    :param dic: dictionary possibly containing bytes keys and/or values.
    :returns: the same dictionary object, mutated in place.
    """
    for key, value in list(dic.items()):
        new_key = key.decode('utf-8') if isinstance(key, bytes) else key
        new_value = value.decode('utf-8') if isinstance(value, bytes) else value
        if new_key is not key:
            del dic[key]
        dic[new_key] = new_value
    return dic
def allowed_file(filename):
    """Return True if *filename* has an extension the gallery accepts."""
    if '.' not in filename:
        return False
    extension = filename.lower().rsplit('.', 1)[1]
    return extension in {
        'txt',
        'png',
        'jpg',
        'jpeg',
        'mpg',
        'mp4',
        'avi',
        'cr2',
    }
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.