commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bfcce626990c53aef7fbe534f1f6ff1092945597 | docs/settings.py | docs/settings.py | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
'django_extensions'
) | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
# 'django_extensions'
) | Fix read the docs failure | Fix read the docs failure
| Python | bsd-3-clause | consbio/tablo | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
'django_extensions'
)Fix read the docs failure | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
# 'django_extensions'
) | <commit_before>DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
'django_extensions'
)<commit_msg>Fix read the docs failure<commit_after> | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
# 'django_extensions'
) | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
'django_extensions'
)Fix read the docs failureDEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
# 'django_extensions'
) | <commit_before>DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
'django_extensions'
)<commit_msg>Fix read the docs failure<commit_after>DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'test.db',
}
}
USE_TZ = False
SITE_ID = 1
STATIC_URL = '/static/'
SECRET_KEY = 'NOT_SO_SECRET'
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'tastypie',
'tablo',
# 'django_extensions'
) |
7f0ab829f677a5d91d5b24dc6181a2519e25a934 | notes/managers.py | notes/managers.py | #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
| #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
| Support hiding note and notebook template notes | Support hiding note and notebook template notes
| Python | agpl-3.0 | syskill/snowy,NoUsername/PrivateNotesExperimental,GNOME/snowy,jaredjennings/snowy,jaredjennings/snowy,sandyarmstrong/snowy,jaredjennings/snowy,NoUsername/PrivateNotesExperimental,widox/snowy,syskill/snowy,leonhandreke/snowy,leonhandreke/snowy,jaredjennings/snowy,widox/snowy,GNOME/snowy,sandyarmstrong/snowy | #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
Support hiding note and notebook template notes | #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
| <commit_before>#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
<commit_msg>Support hiding note and notebook template notes<commit_after> | #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
| #
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
Support hiding note and notebook template notes#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
| <commit_before>#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
<commit_msg>Support hiding note and notebook template notes<commit_after>#
# Copyright (c) 2009 Brad Taylor <brad@getcoded.net>
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
|
cd5c56583c84b2b0fd05d743578193b7b681151c | nn/embedding/embeddings.py | nn/embedding/embeddings.py | import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
| import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
from ..util import func_scope
@func_scope()
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
@func_scope()
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
| Add func_scope to embedding functions | Add func_scope to embedding functions
| Python | unlicense | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
Add func_scope to embedding functions | import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
from ..util import func_scope
@func_scope()
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
@func_scope()
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
| <commit_before>import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
<commit_msg>Add func_scope to embedding functions<commit_after> | import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
from ..util import func_scope
@func_scope()
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
@func_scope()
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
| import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
Add func_scope to embedding functionsimport tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
from ..util import func_scope
@func_scope()
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
@func_scope()
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
| <commit_before>import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
<commit_msg>Add func_scope to embedding functions<commit_after>import tensorflow as tf
from ..flags import FLAGS
from ..variable import variable
from ..util import func_scope
@func_scope()
def embeddings(*, id_space_size, embedding_size, name=None):
return variable([id_space_size, embedding_size], name=name)
@func_scope()
def word_embeddings(name="word_embeddings"):
if FLAGS.word_embeddings is None:
return embeddings(id_space_size=FLAGS.word_space_size,
embedding_size=FLAGS.word_embedding_size,
name=name)
return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type),
name=name)
|
fa418ed5a6769a369d4b5cddfc6e215f551c57cf | events/cms_app.py | events/cms_app.py | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
| from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
| Add namespace to support djangoCMS v3 | Add namespace to support djangoCMS v3
| Python | bsd-3-clause | theherk/django-theherk-events | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
Add namespace to support djangoCMS v3 | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
| <commit_before>from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
<commit_msg>Add namespace to support djangoCMS v3<commit_after> | from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
| from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
Add namespace to support djangoCMS v3from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
| <commit_before>from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
apphook_pool.register(EventsApphook)
<commit_msg>Add namespace to support djangoCMS v3<commit_after>from cms.app_base import CMSApp
from cms.apphook_pool import apphook_pool
from django.utils.translation import ugettext_lazy as _
class EventsApphook(CMSApp):
name = _("Events Apphook")
urls = ["events.urls"]
app_name = "events"
namespace = "events"
apphook_pool.register(EventsApphook)
|
2c742cd00e21412d034bb0cffed0bafca2e6c1ae | inspector/inspector/urls.py | inspector/inspector/urls.py | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[\.A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| Allow dots in module names. | Allow dots in module names.
| Python | bsd-2-clause | refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,refreshoxford/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector,abhijo89/django-cbv-inspector | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Allow dots in module names. | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[\.A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| <commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Allow dots in module names.<commit_after> | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[\.A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
Allow dots in module names.from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[\.A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| <commit_before>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<commit_msg>Allow dots in module names.<commit_after>from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.views.generic import TemplateView
from cbv.views import KlassDetailView
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='base.html'), name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'class/(?P<package>[a-zA-Z_-]+)/(?P<version>[^/]+)/(?P<module>[\.A-Za-z_-]+)/(?P<klass>[A-Za-z_-]*)', KlassDetailView.as_view()),
)
urlpatterns += staticfiles_urlpatterns() + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
c0b3a482b8ef5284070da1398350acf936e50121 | rplugin/python3/deoplete/sources/LanguageClientSource.py | rplugin/python3/deoplete/sources/LanguageClientSource.py | import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
return re.sub(r'(?<!\\)\$\d+', '', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
| import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
| Replace placeholders in completion text | Replace placeholders in completion text
In deoplete source, replace the placeholders with the neosnippet format.
| Python | mit | autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim,autozimu/LanguageClient-neovim | import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
return re.sub(r'(?<!\\)\$\d+', '', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
Replace placeholders in completion text
In deoplete source, replace the placeholders with the neosnippet format. | import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
| <commit_before>import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
return re.sub(r'(?<!\\)\$\d+', '', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
<commit_msg>Replace placeholders in completion text
In deoplete source, replace the placeholders with the neosnippet format.<commit_after> | import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
| import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
return re.sub(r'(?<!\\)\$\d+', '', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
Replace placeholders in completion text
In deoplete source, replace the placeholders with the neosnippet format.import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
| <commit_before>import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
return re.sub(r'(?<!\\)\$\d+', '', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
<commit_msg>Replace placeholders in completion text
In deoplete source, replace the placeholders with the neosnippet format.<commit_after>import re
from .base import Base
CompleteResults = "g:LanguageClient_completeResults"
def simplify_snippet(snip: str) -> str:
snip = re.sub(r'(?<!\\)\$(?P<num>\d+)', '<`\g<num>`>', snip)
return re.sub(r'(?<!\\)\${(?P<num>\d+):(?P<desc>.+?)}',
'<`\g<num>:\g<desc>`>', snip)
class Source(Base):
def __init__(self, vim):
super().__init__(vim)
self.name = "LanguageClient"
self.mark = "[LC]"
self.rank = 1000
self.filetypes = vim.eval(
"get(g:, 'LanguageClient_serverCommands', {})").keys()
self.min_pattern_length = 1
self.input_pattern = r'(\.|::)\w*'
def gather_candidates(self, context):
if not context["is_async"]:
context["is_async"] = True
self.vim.funcs.LanguageClient_omniComplete()
return []
elif self.vim.funcs.eval("len({})".format(CompleteResults)) == 0:
return []
context["is_async"] = False
result = self.vim.funcs.eval("remove({}, 0)".format(CompleteResults))
if result is None:
result = []
return result
|
5b3d38821517f10f9b9da31f28af19e7302de954 | dimod/reference/composites/structure.py | dimod/reference/composites/structure.py | from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
todo
"""
def __init__(self, sampler, nodelist, edgelist):
Sampler.__init__(self)
Composite.__init__(self, sampler)
Structured.__init__(self, nodelist, edgelist)
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
| from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
"""
# we will override these in the __init__, but because they are abstract properties we need to
# signal that we are overriding them
edgelist = None
nodelist = None
children = None
def __init__(self, sampler, nodelist, edgelist):
self.children = [sampler]
self.nodelist = nodelist
self.edgelist = edgelist
@property
def parameters(self):
return self.child.parameters
@property
def properties(self):
return self.child.properties
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
| Update Structure composite to use the new abc | Update Structure composite to use the new abc
| Python | apache-2.0 | oneklc/dimod,oneklc/dimod | from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
todo
"""
def __init__(self, sampler, nodelist, edgelist):
Sampler.__init__(self)
Composite.__init__(self, sampler)
Structured.__init__(self, nodelist, edgelist)
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
Update Structure composite to use the new abc | from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
"""
# we will override these in the __init__, but because they are abstract properties we need to
# signal that we are overriding them
edgelist = None
nodelist = None
children = None
def __init__(self, sampler, nodelist, edgelist):
self.children = [sampler]
self.nodelist = nodelist
self.edgelist = edgelist
@property
def parameters(self):
return self.child.parameters
@property
def properties(self):
return self.child.properties
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
| <commit_before>from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
todo
"""
def __init__(self, sampler, nodelist, edgelist):
Sampler.__init__(self)
Composite.__init__(self, sampler)
Structured.__init__(self, nodelist, edgelist)
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
<commit_msg>Update Structure composite to use the new abc<commit_after> | from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
"""
# we will override these in the __init__, but because they are abstract properties we need to
# signal that we are overriding them
edgelist = None
nodelist = None
children = None
def __init__(self, sampler, nodelist, edgelist):
self.children = [sampler]
self.nodelist = nodelist
self.edgelist = edgelist
@property
def parameters(self):
return self.child.parameters
@property
def properties(self):
return self.child.properties
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
| from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
todo
"""
def __init__(self, sampler, nodelist, edgelist):
Sampler.__init__(self)
Composite.__init__(self, sampler)
Structured.__init__(self, nodelist, edgelist)
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
Update Structure composite to use the new abcfrom dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
"""
# we will override these in the __init__, but because they are abstract properties we need to
# signal that we are overriding them
edgelist = None
nodelist = None
children = None
def __init__(self, sampler, nodelist, edgelist):
self.children = [sampler]
self.nodelist = nodelist
self.edgelist = edgelist
@property
def parameters(self):
return self.child.parameters
@property
def properties(self):
return self.child.properties
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
| <commit_before>from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
todo
"""
def __init__(self, sampler, nodelist, edgelist):
Sampler.__init__(self)
Composite.__init__(self, sampler)
Structured.__init__(self, nodelist, edgelist)
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
<commit_msg>Update Structure composite to use the new abc<commit_after>from dimod.core.sampler import Sampler
from dimod.core.composite import Composite
from dimod.core.structured import Structured
from dimod.decorators import bqm_structured
class StructureComposite(Sampler, Composite, Structured):
"""Creates a structured composed sampler from an unstructured sampler.
"""
# we will override these in the __init__, but because they are abstract properties we need to
# signal that we are overriding them
edgelist = None
nodelist = None
children = None
def __init__(self, sampler, nodelist, edgelist):
self.children = [sampler]
self.nodelist = nodelist
self.edgelist = edgelist
@property
def parameters(self):
return self.child.parameters
@property
def properties(self):
return self.child.properties
@bqm_structured
def sample(self, bqm, **sample_kwargs):
return self.child.sample(bqm, **sample_kwargs)
|
1a00b905b02d47ef8778a4744d9ef67de5149017 | web/premises/constants.py | web/premises/constants.py | FEATURED_CONTENT_COUNT = 30
NEWS_CONTENT_COUNT = 30
UPDATED_CONTENT_COUNT = 30
MAX_PREMISE_CONTENT_LENGTH = 300 | FEATURED_CONTENT_COUNT = 50
NEWS_CONTENT_COUNT = 50
UPDATED_CONTENT_COUNT = 50
MAX_PREMISE_CONTENT_LENGTH = 300 | Change contention counts on front-page | Change contention counts on front-page
| Python | mit | taiansu/arguman.org,beratdogan/arguman.org,bahattincinic/arguman.org,arguman/arguman.org,Arthur2e5/arguman.org,taiansu/arguman.org,donkawechico/arguman.org,bahattincinic/arguman.org,Arthur2e5/arguman.org,arguman/arguman.org,Arthur2e5/arguman.org,Arthur2e5/arguman.org,donkawechico/arguman.org,bahattincinic/arguman.org,arguman/arguman.org,omeripek/arguman.org,taiansu/arguman.org,donkawechico/arguman.org,arguman/arguman.org,beratdogan/arguman.org,taiansu/arguman.org,bahattincinic/arguman.org,omeripek/arguman.org,omeripek/arguman.org,donkawechico/arguman.org | FEATURED_CONTENT_COUNT = 30
NEWS_CONTENT_COUNT = 30
UPDATED_CONTENT_COUNT = 30
MAX_PREMISE_CONTENT_LENGTH = 300Change contention counts on front-page | FEATURED_CONTENT_COUNT = 50
NEWS_CONTENT_COUNT = 50
UPDATED_CONTENT_COUNT = 50
MAX_PREMISE_CONTENT_LENGTH = 300 | <commit_before>FEATURED_CONTENT_COUNT = 30
NEWS_CONTENT_COUNT = 30
UPDATED_CONTENT_COUNT = 30
MAX_PREMISE_CONTENT_LENGTH = 300<commit_msg>Change contention counts on front-page<commit_after> | FEATURED_CONTENT_COUNT = 50
NEWS_CONTENT_COUNT = 50
UPDATED_CONTENT_COUNT = 50
MAX_PREMISE_CONTENT_LENGTH = 300 | FEATURED_CONTENT_COUNT = 30
NEWS_CONTENT_COUNT = 30
UPDATED_CONTENT_COUNT = 30
MAX_PREMISE_CONTENT_LENGTH = 300Change contention counts on front-pageFEATURED_CONTENT_COUNT = 50
NEWS_CONTENT_COUNT = 50
UPDATED_CONTENT_COUNT = 50
MAX_PREMISE_CONTENT_LENGTH = 300 | <commit_before>FEATURED_CONTENT_COUNT = 30
NEWS_CONTENT_COUNT = 30
UPDATED_CONTENT_COUNT = 30
MAX_PREMISE_CONTENT_LENGTH = 300<commit_msg>Change contention counts on front-page<commit_after>FEATURED_CONTENT_COUNT = 50
NEWS_CONTENT_COUNT = 50
UPDATED_CONTENT_COUNT = 50
MAX_PREMISE_CONTENT_LENGTH = 300 |
9d7beff62a3555aa4be51cefb2f54681070d1305 | ircstat/plugins/__init__.py | ircstat/plugins/__init__.py | # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
importlib.import_module('ircstat.plugins.' + name)
plugins = set(plugin() for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
| # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
plugins = set(plugin(config=config) for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
# import plugins
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
module = importlib.import_module('ircstat.plugins.' + name)
for name, item in module.__dict__.items():
if type(item) == type and issubclass(item, Plugin):
item.name = item.__name__
if item.name.endswith('Plugin'):
item.name = item.name[:-6]
| Make sure plugins' .name property gets set | Make sure plugins' .name property gets set
| Python | mit | jreese/ircstat,jreese/ircstat | # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
importlib.import_module('ircstat.plugins.' + name)
plugins = set(plugin() for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
Make sure plugins' .name property gets set | # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
plugins = set(plugin(config=config) for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
# import plugins
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
module = importlib.import_module('ircstat.plugins.' + name)
for name, item in module.__dict__.items():
if type(item) == type and issubclass(item, Plugin):
item.name = item.__name__
if item.name.endswith('Plugin'):
item.name = item.name[:-6]
| <commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
importlib.import_module('ircstat.plugins.' + name)
plugins = set(plugin() for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
<commit_msg>Make sure plugins' .name property gets set<commit_after> | # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
plugins = set(plugin(config=config) for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
# import plugins
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
module = importlib.import_module('ircstat.plugins.' + name)
for name, item in module.__dict__.items():
if type(item) == type and issubclass(item, Plugin):
item.name = item.__name__
if item.name.endswith('Plugin'):
item.name = item.name[:-6]
| # Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
importlib.import_module('ircstat.plugins.' + name)
plugins = set(plugin() for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
Make sure plugins' .name property gets set# Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
plugins = set(plugin(config=config) for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
# import plugins
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
module = importlib.import_module('ircstat.plugins.' + name)
for name, item in module.__dict__.items():
if type(item) == type and issubclass(item, Plugin):
item.name = item.__name__
if item.name.endswith('Plugin'):
item.name = item.name[:-6]
| <commit_before># Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
importlib.import_module('ircstat.plugins.' + name)
plugins = set(plugin() for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
<commit_msg>Make sure plugins' .name property gets set<commit_after># Copyright 2013 John Reese
# Licensed under the MIT license
import importlib
import os
from functools import lru_cache
from os import path
from .base import Plugin
@lru_cache()
def load_plugins(config):
"""Import all the plugins, and return a set of plugin instances, filtering
out any plugin whose name appears in the configurable blacklist."""
plugins = set(plugin(config=config) for plugin in Plugin.subclasses())
for plugin in plugins:
if plugin.name in config.plugin_blacklist:
plugins.remove(plugin)
return plugins
# import plugins
cwd = path.abspath(path.dirname(__file__))
files = os.listdir(cwd)
for filename in files:
name, ext = path.splitext(filename)
if name.startswith('_'):
continue
if ext == '.py':
module = importlib.import_module('ircstat.plugins.' + name)
for name, item in module.__dict__.items():
if type(item) == type and issubclass(item, Plugin):
item.name = item.__name__
if item.name.endswith('Plugin'):
item.name = item.name[:-6]
|
ad8f54f7a2532b0ad643790d061b4e488ac7849d | patchboard/tests/functional/trivial_example_test.py | patchboard/tests/functional/trivial_example_test.py | # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
@pytest.mark.xfail
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
| # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
| Remove xfail from working test | Remove xfail from working test
| Python | mit | patchboard/patchboard-py | # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
@pytest.mark.xfail
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
Remove xfail from working test | # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
| <commit_before># trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
@pytest.mark.xfail
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
<commit_msg>Remove xfail from working test<commit_after> | # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
| # trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
@pytest.mark.xfail
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
Remove xfail from working test# trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
| <commit_before># trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
@pytest.mark.xfail
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
<commit_msg>Remove xfail from working test<commit_after># trivial_example.py
#
# Copyright 2014 BitVault.
#
# Reproduces the tests in trivial_example.rb
from __future__ import print_function
import pytest
from random import randint
from patchboard.tests.fixtures import (trivial_net_pb,
trivial_net_resources,
trivial_net_users)
pytest.mark.usefixtures(trivial_net_pb,
trivial_net_resources,
trivial_net_users)
def test_users_create(trivial_net_users):
login = "foo-{0}".format(randint(1, 100000))
user = trivial_net_users.create({u'login': login})
|
1e79cc27ae0025d9ba51eff2828cc25247c08d3c | ssbench/worker.py | ssbench/worker.py | import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
print "BOO"
| import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
raise NameError("Unknown job type %r" % (job_data['type'],))
| Raise error on unknown job type | Raise error on unknown job type
| Python | apache-2.0 | charz/ssbench,swiftstack/ssbench,charz/ssbench,swiftstack/ssbench | import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
print "BOO"
Raise error on unknown job type | import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
raise NameError("Unknown job type %r" % (job_data['type'],))
| <commit_before>import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
print "BOO"
<commit_msg>Raise error on unknown job type<commit_after> | import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
raise NameError("Unknown job type %r" % (job_data['type'],))
| import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
print "BOO"
Raise error on unknown job typeimport yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
raise NameError("Unknown job type %r" % (job_data['type'],))
| <commit_before>import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
print "BOO"
<commit_msg>Raise error on unknown job type<commit_after>import yaml
from ssbench.constants import *
class Worker:
def __init__(self, queue):
queue.use(STATS_TUBE)
self.queue = queue
def go(self):
job = self.queue.reserve()
while job:
job.delete() # avoid any job-timeout nonsense
self.handle_job(job)
job = self.queue.reserve()
def handle_job(self, job):
job_data = yaml.load(job.body)
if job_data['type'] == UPLOAD_OBJECT:
print "WOO" # magic goes here
else:
raise NameError("Unknown job type %r" % (job_data['type'],))
|
168dab45955499aac320c859ff63a43cb20d9145 | fitz/__init__.py | fitz/__init__.py | from frontend import *
import tools
version = "0.0.1.dev"
__version__ = version
| from frontend import *
import tools
version = "0.0.1-dev"
__version__ = version
| Switch version label to comply with semver | Switch version label to comply with semver | Python | bsd-3-clause | kastman/fitz | from frontend import *
import tools
version = "0.0.1.dev"
__version__ = version
Switch version label to comply with semver | from frontend import *
import tools
version = "0.0.1-dev"
__version__ = version
| <commit_before>from frontend import *
import tools
version = "0.0.1.dev"
__version__ = version
<commit_msg>Switch version label to comply with semver<commit_after> | from frontend import *
import tools
version = "0.0.1-dev"
__version__ = version
| from frontend import *
import tools
version = "0.0.1.dev"
__version__ = version
Switch version label to comply with semverfrom frontend import *
import tools
version = "0.0.1-dev"
__version__ = version
| <commit_before>from frontend import *
import tools
version = "0.0.1.dev"
__version__ = version
<commit_msg>Switch version label to comply with semver<commit_after>from frontend import *
import tools
version = "0.0.1-dev"
__version__ = version
|
0be21659f1190e41db9b9818dd694015d94f78cc | zerodb/models/__init__.py | zerodb/models/__init__.py | import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
| import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
# This set will go to metaclass
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
# This set will go to metaclass
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
# Indexed fields which have default values
default_fields = indexed_fields.difference(required_fields).difference(kw)
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
for field in default_fields:
setattr(self, field, getattr(self.__class__, field).default)
for field, value in kw.iteritems():
setattr(self, field, value)
| Set model instance attributes based on fields | Set model instance attributes based on fields
| Python | agpl-3.0 | zerodb/zerodb,zero-db/zerodb,zero-db/zerodb,zerodb/zerodb | import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
Set model instance attributes based on fields | import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
# This set will go to metaclass
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
# This set will go to metaclass
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
# Indexed fields which have default values
default_fields = indexed_fields.difference(required_fields).difference(kw)
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
for field in default_fields:
setattr(self, field, getattr(self.__class__, field).default)
for field, value in kw.iteritems():
setattr(self, field, value)
| <commit_before>import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
<commit_msg>Set model instance attributes based on fields<commit_after> | import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
# This set will go to metaclass
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
# This set will go to metaclass
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
# Indexed fields which have default values
default_fields = indexed_fields.difference(required_fields).difference(kw)
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
for field in default_fields:
setattr(self, field, getattr(self.__class__, field).default)
for field, value in kw.iteritems():
setattr(self, field, value)
| import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
Set model instance attributes based on fieldsimport persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
# This set will go to metaclass
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
# This set will go to metaclass
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
# Indexed fields which have default values
default_fields = indexed_fields.difference(required_fields).difference(kw)
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
for field in default_fields:
setattr(self, field, getattr(self.__class__, field).default)
for field, value in kw.iteritems():
setattr(self, field, value)
| <commit_before>import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
<commit_msg>Set model instance attributes based on fields<commit_after>import persistent
import indexable
import exceptions
class Model(persistent.Persistent):
"""
Data model to easily create indexable persistent objects.
If an object declares a property from indexable, this property is indexed.
*All* other properties are stored but unindexed
Example:
>>> class Page(Model):
... title = indexable.Field()
... text = indexable.Text()
... page = Page(title="Hello", text="World", extra=12345)
"""
def __init__(self, **kw):
# This set will go to metaclass
indexed_fields = set(filter(lambda key:
not key.startswith("_") and
isinstance(getattr(self.__class__, key), indexable.Indexable),
self.__class__.__dict__.keys()))
# This set will go to metaclass
required_fields = set(filter(lambda key: getattr(self.__class__, key).default is None, indexed_fields))
# Indexed fields which have default values
default_fields = indexed_fields.difference(required_fields).difference(kw)
missed_fields = required_fields.difference(kw)
if missed_fields:
raise exceptions.ModelException("You should provide fields: " + ", ".join(map(str, missed_fields)))
for field in default_fields:
setattr(self, field, getattr(self.__class__, field).default)
for field, value in kw.iteritems():
setattr(self, field, value)
|
b378102284bbbcc9ad909a7393dfffa24377ce27 | ginga/__init__.py | ginga/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = 'unknown'
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
| Fix version import and test runner | BUG: Fix version import and test runner
| Python | bsd-3-clause | pllim/ginga,pllim/ginga,ejeschke/ginga,naojsoft/ginga,ejeschke/ginga,ejeschke/ginga,pllim/ginga,naojsoft/ginga,naojsoft/ginga | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = 'unknown'
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
BUG: Fix version import and test runner | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = 'unknown'
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
<commit_msg>BUG: Fix version import and test runner<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = 'unknown'
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
BUG: Fix version import and test runner# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Set up the version
from pkg_resources import get_distribution, DistributionNotFound
try:
__version__ = get_distribution(__name__).version
except DistributionNotFound:
# package is not installed
__version__ = 'unknown'
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
<commit_msg>BUG: Fix version import and test runner<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""See LONG_DESC.txt"""
# Packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
try:
# As long as we're using setuptools/distribute, we need to do this the
# setuptools way or else pkg_resources will throw up unnecessary and
# annoying warnings (even though the namespace mechanism will still
# otherwise work without it).
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
pass
__path__ = __import__('pkgutil').extend_path(__path__, __name__) # noqa
# END
|
5bc8ad66b59312d7459ec9c4d58d89bee0038a48 | conference_scheduler/scheduler.py | conference_scheduler/scheduler.py | import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
problem.solve()
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
| import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
problem.solve()
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
| Correct order of constraint assignment and problem solution | Correct order of constraint assignment and problem solution
| Python | mit | PyconUK/ConferenceScheduler | import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
problem.solve()
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
Correct order of constraint assignment and problem solution | import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
problem.solve()
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
| <commit_before>import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
problem.solve()
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
<commit_msg>Correct order of constraint assignment and problem solution<commit_after> | import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
problem.solve()
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
| import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
problem.solve()
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
Correct order of constraint assignment and problem solutionimport pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
problem.solve()
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
| <commit_before>import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
problem.solve()
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
<commit_msg>Correct order of constraint assignment and problem solution<commit_after>import pulp
from typing import Sequence
import conference_scheduler.parameters as params
def is_valid_schedule(schedule):
"""Validate an existing schedule against a problem
Parameters
---------
schedule : iterable
of resources.ScheduledItem
Returns
-------
bool
True if schedule is valid. False otherwise
"""
if len(schedule) == 0:
return False
return True
def schedule(
events: Sequence, rooms: Sequence, slots: Sequence,
existing: Sequence = None
):
"""Compute a new, valid, optimised schedule
Parameters
----------
events : List or Tuple
of resources.Event
rooms : List or Tuple
of resources.Room
slots : List or Tuple
of resources.Slot
existing : iterable
of resources.ScheduledItem.
Represents an existing schedule.
If provided, the returned schedule will be optimised to minimise
changes from this schedule
Returns
-------
iterable
of resources.ScheduledItem
"""
problem = pulp.LpProblem()
variables = params.variables(events, rooms, slots)
for constraint in params.constraints(variables, events, rooms, slots):
problem += constraint
problem.solve()
return [
scheduled_item for scheduled_item, variable in variables.items()
if variable.value() > 0
]
|
6d7e6d649302523ccbe0373b172b4a4fa8990a45 | dimod/__init__.py | dimod/__init__.py | from __future__ import absolute_import
import sys
__version__ = '0.3.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
| from __future__ import absolute_import
import sys
__version__ = '0.4.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
| Increment version 0.3.1 -> 0.4.0 | Increment version 0.3.1 -> 0.4.0 | Python | apache-2.0 | oneklc/dimod,oneklc/dimod | from __future__ import absolute_import
import sys
__version__ = '0.3.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
Increment version 0.3.1 -> 0.4.0 | from __future__ import absolute_import
import sys
__version__ = '0.4.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
| <commit_before>from __future__ import absolute_import
import sys
__version__ = '0.3.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
<commit_msg>Increment version 0.3.1 -> 0.4.0<commit_after> | from __future__ import absolute_import
import sys
__version__ = '0.4.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
| from __future__ import absolute_import
import sys
__version__ = '0.3.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
Increment version 0.3.1 -> 0.4.0from __future__ import absolute_import
import sys
__version__ = '0.4.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
| <commit_before>from __future__ import absolute_import
import sys
__version__ = '0.3.1'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
<commit_msg>Increment version 0.3.1 -> 0.4.0<commit_after>from __future__ import absolute_import
import sys
__version__ = '0.4.0'
__author__ = 'D-Wave Systems Inc.'
__authoremail__ = 'acondello@dwavesys.com'
__description__ = 'A shared API for binary quadratic model samplers.'
_PY2 = sys.version_info[0] == 2
from dimod.template_sampler import *
from dimod.samplers import *
import dimod.samplers
from dimod.template_composite import *
from dimod.composites import *
import dimod.composites
from dimod.template_response import *
from dimod.responses import *
import dimod.responses
from dimod.utilities import *
import dimod.utilities
from dimod.keyword_arguments import *
import dimod.keyword_arguments
import dimod.decorators
|
58b46dbc62c98372ed300eeb20b5ecb80a11ddb3 | test/test-mime.py | test/test-mime.py | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
| from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| Test getting comment for Mime type | Test getting comment for Mime type
| Python | lgpl-2.1 | 0312birdzhang/pyxdg | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
Test getting comment for Mime type | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| <commit_before>from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
<commit_msg>Test getting comment for Mime type<commit_after> | from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
Test getting comment for Mime typefrom xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
| <commit_before>from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
<commit_msg>Test getting comment for Mime type<commit_after>from xdg import Mime
import unittest
import os.path
import tempfile, shutil
import resources
class MimeTest(unittest.TestCase):
def test_get_type_by_name(self):
appzip = Mime.get_type_by_name("foo.zip")
self.assertEqual(appzip.media, "application")
self.assertEqual(appzip.subtype, "zip")
def test_get_type_by_data(self):
imgpng = Mime.get_type_by_data(resources.png_data)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
def test_get_type_by_contents(self):
tmpdir = tempfile.mkdtemp()
try:
test_file = os.path.join(tmpdir, "test")
with open(test_file, "wb") as f:
f.write(resources.png_data)
imgpng = Mime.get_type_by_contents(test_file)
self.assertEqual(imgpng.media, "image")
self.assertEqual(imgpng.subtype, "png")
finally:
shutil.rmtree(tmpdir)
def test_lookup(self):
pdf1 = Mime.lookup("application/pdf")
pdf2 = Mime.lookup("application", "pdf")
self.assertEqual(pdf1, pdf2)
self.assertEqual(pdf1.media, "application")
self.assertEqual(pdf1.subtype, "pdf")
pdf1.get_comment()
|
f61c0a33a79fa4670874f4469e7ceb76c644bf4b | lambda_local/environment_variables.py | lambda_local/environment_variables.py | import json
import os
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
for env_name, env_value in env_vars.items():
os.environ[str(env_name)] = str(env_value)
| import json
import os
def export_variables(environment_variables):
for env_name, env_value in environment_variables.items():
os.environ[str(env_name)] = str(env_value)
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
export_variables(env_vars)
| Split the parsing of input and the exporting of the variables for reuse | Split the parsing of input and the exporting of the variables for
reuse
| Python | mit | HDE/python-lambda-local,HDE/python-lambda-local | import json
import os
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
for env_name, env_value in env_vars.items():
os.environ[str(env_name)] = str(env_value)
Split the parsing of input and the exporting of the variables for
reuse | import json
import os
def export_variables(environment_variables):
for env_name, env_value in environment_variables.items():
os.environ[str(env_name)] = str(env_value)
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
export_variables(env_vars)
| <commit_before>import json
import os
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
for env_name, env_value in env_vars.items():
os.environ[str(env_name)] = str(env_value)
<commit_msg>Split the parsing of input and the exporting of the variables for
reuse<commit_after> | import json
import os
def export_variables(environment_variables):
for env_name, env_value in environment_variables.items():
os.environ[str(env_name)] = str(env_value)
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
export_variables(env_vars)
| import json
import os
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
for env_name, env_value in env_vars.items():
os.environ[str(env_name)] = str(env_value)
Split the parsing of input and the exporting of the variables for
reuseimport json
import os
def export_variables(environment_variables):
for env_name, env_value in environment_variables.items():
os.environ[str(env_name)] = str(env_value)
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
export_variables(env_vars)
| <commit_before>import json
import os
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
for env_name, env_value in env_vars.items():
os.environ[str(env_name)] = str(env_value)
<commit_msg>Split the parsing of input and the exporting of the variables for
reuse<commit_after>import json
import os
def export_variables(environment_variables):
for env_name, env_value in environment_variables.items():
os.environ[str(env_name)] = str(env_value)
def set_environment_variables(json_file_path):
"""
Read and set environment variables from a flat json file.
Bear in mind that env vars set this way and later on read using
`os.getenv` function will be strings since after all env vars are just
that - plain strings.
Json file example:
```
{
"FOO": "bar",
"BAZ": true
}
```
:param json_file_path: path to flat json file
:type json_file_path: str
"""
if json_file_path:
with open(json_file_path) as json_file:
env_vars = json.loads(json_file.read())
export_variables(env_vars)
|
3598b974ecc078f34e54a32b06e16af8ccaf839b | opps/core/admin/__init__.py | opps/core/admin/__init__.py | # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
| # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
| Add source admin in Admin Opps Core | Add source admin in Admin Opps Core
| Python | mit | opps/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,williamroot/opps,opps/opps | # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
Add source admin in Admin Opps Core | # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
| <commit_before># -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
<commit_msg>Add source admin in Admin Opps Core<commit_after> | # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
| # -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
Add source admin in Admin Opps Core# -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
| <commit_before># -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
<commit_msg>Add source admin in Admin Opps Core<commit_after># -*- coding: utf-8 -*-
from opps.core.admin.channel import *
from opps.core.admin.profile import *
from opps.core.admin.source import *
|
83406db629abd389da85666dc79925f8e03a22f4 | lms/djangoapps/courseware/__init__.py | lms/djangoapps/courseware/__init__.py | #pylint: disable=missing-docstring
from __future__ import absolute_import
import warnings
if __name__ == 'courseware':
warnings.warn("Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported", DeprecationWarning)
| #pylint: disable=missing-docstring
from __future__ import absolute_import
import inspect
import warnings
if __name__ == 'courseware':
# pylint: disable=unicode-format-string
# Show the call stack that imported us wrong.
stack = "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in inspect.stack()[:0:-1])
msg = "Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported:\n" + stack
warnings.warn(msg, DeprecationWarning)
| Add more diagnostics to the courseware import warning | Add more diagnostics to the courseware import warning
| Python | agpl-3.0 | angelapper/edx-platform,angelapper/edx-platform,cpennington/edx-platform,edx-solutions/edx-platform,mitocw/edx-platform,edx/edx-platform,cpennington/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,edx/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,edx-solutions/edx-platform,stvstnfrd/edx-platform,arbrandes/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,msegado/edx-platform,mitocw/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,edx/edx-platform,stvstnfrd/edx-platform,cpennington/edx-platform,msegado/edx-platform,ESOedX/edx-platform,ESOedX/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,msegado/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,edx/edx-platform,edx-solutions/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,mitocw/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,appsembler/edx-platform,mitocw/edx-platform,eduNEXT/edunext-platform,appsembler/edx-platform,EDUlib/edx-platform,edx-solutions/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform | #pylint: disable=missing-docstring
from __future__ import absolute_import
import warnings
if __name__ == 'courseware':
warnings.warn("Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported", DeprecationWarning)
Add more diagnostics to the courseware import warning | #pylint: disable=missing-docstring
from __future__ import absolute_import
import inspect
import warnings
if __name__ == 'courseware':
# pylint: disable=unicode-format-string
# Show the call stack that imported us wrong.
stack = "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in inspect.stack()[:0:-1])
msg = "Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported:\n" + stack
warnings.warn(msg, DeprecationWarning)
| <commit_before>#pylint: disable=missing-docstring
from __future__ import absolute_import
import warnings
if __name__ == 'courseware':
warnings.warn("Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported", DeprecationWarning)
<commit_msg>Add more diagnostics to the courseware import warning<commit_after> | #pylint: disable=missing-docstring
from __future__ import absolute_import
import inspect
import warnings
if __name__ == 'courseware':
# pylint: disable=unicode-format-string
# Show the call stack that imported us wrong.
stack = "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in inspect.stack()[:0:-1])
msg = "Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported:\n" + stack
warnings.warn(msg, DeprecationWarning)
| #pylint: disable=missing-docstring
from __future__ import absolute_import
import warnings
if __name__ == 'courseware':
warnings.warn("Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported", DeprecationWarning)
Add more diagnostics to the courseware import warning#pylint: disable=missing-docstring
from __future__ import absolute_import
import inspect
import warnings
if __name__ == 'courseware':
# pylint: disable=unicode-format-string
# Show the call stack that imported us wrong.
stack = "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in inspect.stack()[:0:-1])
msg = "Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported:\n" + stack
warnings.warn(msg, DeprecationWarning)
| <commit_before>#pylint: disable=missing-docstring
from __future__ import absolute_import
import warnings
if __name__ == 'courseware':
warnings.warn("Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported", DeprecationWarning)
<commit_msg>Add more diagnostics to the courseware import warning<commit_after>#pylint: disable=missing-docstring
from __future__ import absolute_import
import inspect
import warnings
if __name__ == 'courseware':
# pylint: disable=unicode-format-string
# Show the call stack that imported us wrong.
stack = "\n".join("%30s : %s:%d" % (t[3], t[1], t[2]) for t in inspect.stack()[:0:-1])
msg = "Importing 'lms.djangoapps.courseware' as 'courseware' is no longer supported:\n" + stack
warnings.warn(msg, DeprecationWarning)
|
3beea9e3800f5c1e68f869d46d137162016a5276 | zc-list.py | zc-list.py | #!/usr/bin/env python
import sys
import client_wrap
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import sys
import argparse
import client_wrap
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", help="type of the cached data", default="double")
parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0")
parser.add_argument("-w", "--column", help="number of columns", type=int, default=1)
parser.add_argument("-l", "--log", help="log file name", default="")
args = parser.parse_args()
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
parse_args()
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
| Add the parsing arguments function | Add the parsing arguments function
| Python | agpl-3.0 | ellysh/zero-cache-utils,ellysh/zero-cache-utils | #!/usr/bin/env python
import sys
import client_wrap
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
Add the parsing arguments function | #!/usr/bin/env python
import sys
import argparse
import client_wrap
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", help="type of the cached data", default="double")
parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0")
parser.add_argument("-w", "--column", help="number of columns", type=int, default=1)
parser.add_argument("-l", "--log", help="log file name", default="")
args = parser.parse_args()
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
parse_args()
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
import sys
import client_wrap
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
<commit_msg>Add the parsing arguments function<commit_after> | #!/usr/bin/env python
import sys
import argparse
import client_wrap
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", help="type of the cached data", default="double")
parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0")
parser.add_argument("-w", "--column", help="number of columns", type=int, default=1)
parser.add_argument("-l", "--log", help="log file name", default="")
args = parser.parse_args()
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
parse_args()
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import sys
import client_wrap
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
Add the parsing arguments function#!/usr/bin/env python
import sys
import argparse
import client_wrap
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", help="type of the cached data", default="double")
parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0")
parser.add_argument("-w", "--column", help="number of columns", type=int, default=1)
parser.add_argument("-l", "--log", help="log file name", default="")
args = parser.parse_args()
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
parse_args()
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
import sys
import client_wrap
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
<commit_msg>Add the parsing arguments function<commit_after>#!/usr/bin/env python
import sys
import argparse
import client_wrap
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--type", help="type of the cached data", default="double")
parser.add_argument("-c", "--connection", help="connection string", default="ipc:///var/run/zero-cache/0")
parser.add_argument("-w", "--column", help="number of columns", type=int, default=1)
parser.add_argument("-l", "--log", help="log file name", default="")
args = parser.parse_args()
def get_keys(client):
key_str = client.GetKeys()
keys = key_str.split (';')
del keys[-1]
if len(keys) == 0:
sys.exit()
return keys
def print_keys(client, keys):
for key in keys:
value = client.ReadLong(key)
print "%s = %d" % (key, value)
def main():
parse_args()
client = client_wrap.ClientWrap("get_test.log", "ipc:///var/run/zero-cache/0", 0)
keys = get_keys(client)
print_keys(client, keys)
if __name__ == "__main__":
main()
|
cdb7e9fd80b6b5dc31b0096beb7283b373709b8a | tekka/lib/spell_entry.py | tekka/lib/spell_entry.py |
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
pass
| import gtk
from ..helper import color
from gettext import gettext as _
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
def __init__(self, *args, **kwargs):
super(SpellEntry, self).__init__(*args, **kwargs)
self._ranges = {}
self.connect("populate-popup", self._determine_popup_menu)
self.connect("activate", lambda s, x, *u: s._apply_color_tags())
def _determine_popup_menu(self, widget, menu, *user):
self._popup_menu_handler(menu)
# TODO:
# nothing marked: set color for whole text: { (0,n) : {"fg":x,"bg":y} }
# text marked: set color for specific text: { ..., (n,m) : {"fg":x,"bg":y} }
# nothing marked & dict !empty: clear dict & goto "nothing marked"
# text marked & dict !empty: goto "text marked"
# TODO: mark color setting visually via the pango Layout (set_markup)
# TODO: determine what happens if the user edits the text...
def _apply_color_tags(self):
pass
def _fg_item_activate(self, value):
pass
def _bg_item_activate(self, value):
pass
def _popup_menu_handler(self, menu):
fg_item = gtk.MenuItem(label=_("Foreground Color"))
fg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._fg_item_activate, value)
fg_submenu.append(item)
fg_item.set_submenu(fg_submenu)
fg_item.show_all()
menu.insert(fg_item, 0)
bg_item = gtk.MenuItem(label=_("Background Color"))
bg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._bg_item_activate, value)
bg_submenu.append(item)
bg_item.set_submenu(bg_submenu)
bg_item.show_all()
menu.insert(bg_item, 1)
| Add dummy foreground/background color menu | Add dummy foreground/background color menu
| Python | bsd-2-clause | sushi-irc/tekka |
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
pass
Add dummy foreground/background color menu | import gtk
from ..helper import color
from gettext import gettext as _
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
def __init__(self, *args, **kwargs):
super(SpellEntry, self).__init__(*args, **kwargs)
self._ranges = {}
self.connect("populate-popup", self._determine_popup_menu)
self.connect("activate", lambda s, x, *u: s._apply_color_tags())
def _determine_popup_menu(self, widget, menu, *user):
self._popup_menu_handler(menu)
# TODO:
# nothing marked: set color for whole text: { (0,n) : {"fg":x,"bg":y} }
# text marked: set color for specific text: { ..., (n,m) : {"fg":x,"bg":y} }
# nothing marked & dict !empty: clear dict & goto "nothing marked"
# text marked & dict !empty: goto "text marked"
# TODO: mark color setting visually via the pango Layout (set_markup)
# TODO: determine what happens if the user edits the text...
def _apply_color_tags(self):
pass
def _fg_item_activate(self, value):
pass
def _bg_item_activate(self, value):
pass
def _popup_menu_handler(self, menu):
fg_item = gtk.MenuItem(label=_("Foreground Color"))
fg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._fg_item_activate, value)
fg_submenu.append(item)
fg_item.set_submenu(fg_submenu)
fg_item.show_all()
menu.insert(fg_item, 0)
bg_item = gtk.MenuItem(label=_("Background Color"))
bg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._bg_item_activate, value)
bg_submenu.append(item)
bg_item.set_submenu(bg_submenu)
bg_item.show_all()
menu.insert(bg_item, 1)
| <commit_before>
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
pass
<commit_msg>Add dummy foreground/background color menu<commit_after> | import gtk
from ..helper import color
from gettext import gettext as _
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
def __init__(self, *args, **kwargs):
super(SpellEntry, self).__init__(*args, **kwargs)
self._ranges = {}
self.connect("populate-popup", self._determine_popup_menu)
self.connect("activate", lambda s, x, *u: s._apply_color_tags())
def _determine_popup_menu(self, widget, menu, *user):
self._popup_menu_handler(menu)
# TODO:
# nothing marked: set color for whole text: { (0,n) : {"fg":x,"bg":y} }
# text marked: set color for specific text: { ..., (n,m) : {"fg":x,"bg":y} }
# nothing marked & dict !empty: clear dict & goto "nothing marked"
# text marked & dict !empty: goto "text marked"
# TODO: mark color setting visually via the pango Layout (set_markup)
# TODO: determine what happens if the user edits the text...
def _apply_color_tags(self):
pass
def _fg_item_activate(self, value):
pass
def _bg_item_activate(self, value):
pass
def _popup_menu_handler(self, menu):
fg_item = gtk.MenuItem(label=_("Foreground Color"))
fg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._fg_item_activate, value)
fg_submenu.append(item)
fg_item.set_submenu(fg_submenu)
fg_item.show_all()
menu.insert(fg_item, 0)
bg_item = gtk.MenuItem(label=_("Background Color"))
bg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._bg_item_activate, value)
bg_submenu.append(item)
bg_item.set_submenu(bg_submenu)
bg_item.show_all()
menu.insert(bg_item, 1)
|
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
pass
Add dummy foreground/background color menuimport gtk
from ..helper import color
from gettext import gettext as _
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
def __init__(self, *args, **kwargs):
super(SpellEntry, self).__init__(*args, **kwargs)
self._ranges = {}
self.connect("populate-popup", self._determine_popup_menu)
self.connect("activate", lambda s, x, *u: s._apply_color_tags())
def _determine_popup_menu(self, widget, menu, *user):
self._popup_menu_handler(menu)
# TODO:
# nothing marked: set color for whole text: { (0,n) : {"fg":x,"bg":y} }
# text marked: set color for specific text: { ..., (n,m) : {"fg":x,"bg":y} }
# nothing marked & dict !empty: clear dict & goto "nothing marked"
# text marked & dict !empty: goto "text marked"
# TODO: mark color setting visually via the pango Layout (set_markup)
# TODO: determine what happens if the user edits the text...
def _apply_color_tags(self):
pass
def _fg_item_activate(self, value):
pass
def _bg_item_activate(self, value):
pass
def _popup_menu_handler(self, menu):
fg_item = gtk.MenuItem(label=_("Foreground Color"))
fg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._fg_item_activate, value)
fg_submenu.append(item)
fg_item.set_submenu(fg_submenu)
fg_item.show_all()
menu.insert(fg_item, 0)
bg_item = gtk.MenuItem(label=_("Background Color"))
bg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._bg_item_activate, value)
bg_submenu.append(item)
bg_item.set_submenu(bg_submenu)
bg_item.show_all()
menu.insert(bg_item, 1)
| <commit_before>
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
pass
<commit_msg>Add dummy foreground/background color menu<commit_after>import gtk
from ..helper import color
from gettext import gettext as _
try:
from sexy import SpellEntry as _SpellEntry
except ImportError:
from gtk import Entry as _SpellEntry
class SpellEntry(_SpellEntry):
__gtype_name__ = "SpellEntry"
def __init__(self, *args, **kwargs):
super(SpellEntry, self).__init__(*args, **kwargs)
self._ranges = {}
self.connect("populate-popup", self._determine_popup_menu)
self.connect("activate", lambda s, x, *u: s._apply_color_tags())
def _determine_popup_menu(self, widget, menu, *user):
self._popup_menu_handler(menu)
# TODO:
# nothing marked: set color for whole text: { (0,n) : {"fg":x,"bg":y} }
# text marked: set color for specific text: { ..., (n,m) : {"fg":x,"bg":y} }
# nothing marked & dict !empty: clear dict & goto "nothing marked"
# text marked & dict !empty: goto "text marked"
# TODO: mark color setting visually via the pango Layout (set_markup)
# TODO: determine what happens if the user edits the text...
def _apply_color_tags(self):
pass
def _fg_item_activate(self, value):
pass
def _bg_item_activate(self, value):
pass
def _popup_menu_handler(self, menu):
fg_item = gtk.MenuItem(label=_("Foreground Color"))
fg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._fg_item_activate, value)
fg_submenu.append(item)
fg_item.set_submenu(fg_submenu)
fg_item.show_all()
menu.insert(fg_item, 0)
bg_item = gtk.MenuItem(label=_("Background Color"))
bg_submenu = gtk.Menu()
for (value, name) in color.COLOR_NAMES.items():
item = gtk.MenuItem(label=" ".join([n.capitalize() for n in name.split(" ")]))
item.connect("activate", self._bg_item_activate, value)
bg_submenu.append(item)
bg_item.set_submenu(bg_submenu)
bg_item.show_all()
menu.insert(bg_item, 1)
|
cab4ec885d3101775ac16532d46f6f47700d1134 | IATISimpleTester/views/uploader.py | IATISimpleTester/views/uploader.py | import os.path
from flask import request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
resp = {}
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if form_name:
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
if request.args.get('output') == 'json':
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
| import os.path
from flask import abort, request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if not form_name:
return abort(404)
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
if request.args.get('output') == 'json':
resp = {}
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
| Tidy up upload response a bit | Tidy up upload response a bit
| Python | mit | pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester | import os.path
from flask import request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
resp = {}
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if form_name:
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
if request.args.get('output') == 'json':
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
Tidy up upload response a bit | import os.path
from flask import abort, request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if not form_name:
return abort(404)
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
if request.args.get('output') == 'json':
resp = {}
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
| <commit_before>import os.path
from flask import request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
resp = {}
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if form_name:
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
if request.args.get('output') == 'json':
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
<commit_msg>Tidy up upload response a bit<commit_after> | import os.path
from flask import abort, request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if not form_name:
return abort(404)
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
if request.args.get('output') == 'json':
resp = {}
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
| import os.path
from flask import request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
resp = {}
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if form_name:
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
if request.args.get('output') == 'json':
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
Tidy up upload response a bitimport os.path
from flask import abort, request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if not form_name:
return abort(404)
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
if request.args.get('output') == 'json':
resp = {}
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
| <commit_before>import os.path
from flask import request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
resp = {}
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if form_name:
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
if request.args.get('output') == 'json':
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
<commit_msg>Tidy up upload response a bit<commit_after>import os.path
from flask import abort, request, jsonify, redirect, url_for
from IATISimpleTester import app, db
from IATISimpleTester.models import SuppliedData
@app.route('/upload', methods=['GET', 'POST'])
def upload():
source_url = request.args.get('source_url')
file = request.files.get('file')
raw_text = request.args.get('paste')
form_name = None
if source_url:
form_name = 'url_form'
elif raw_text:
form_name = 'text_form'
elif file:
form_name = 'upload_form'
if not form_name:
return abort(404)
data = SuppliedData(source_url, file, raw_text, form_name)
db.session.add(data)
db.session.commit()
if request.args.get('output') == 'json':
resp = {}
resp['success'] = True
resp['data'] = {
'id': data.id,
'original_file': data.original_file,
}
return jsonify(resp)
return redirect(url_for('explore', uuid=data.id))
|
b5fecb2dd0d85d97716b21aa34a8c41d06bc476f | {{cookiecutter.repo_name}}/tests/test_extension.py | {{cookiecutter.repo_name}}/tests/test_extension.py | import unittest
from mopidy_scrobbler import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
| import unittest
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
| Remove reference to scrobbler extension | Remove reference to scrobbler extension
| Python | apache-2.0 | mopidy/cookiecutter-mopidy-ext | import unittest
from mopidy_scrobbler import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
Remove reference to scrobbler extension | import unittest
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
| <commit_before>import unittest
from mopidy_scrobbler import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
<commit_msg>Remove reference to scrobbler extension<commit_after> | import unittest
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
| import unittest
from mopidy_scrobbler import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
Remove reference to scrobbler extensionimport unittest
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
| <commit_before>import unittest
from mopidy_scrobbler import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
<commit_msg>Remove reference to scrobbler extension<commit_after>import unittest
from mopidy_{{ cookiecutter.ext_name }} import Extension, frontend as frontend_lib
class ExtensionTest(unittest.TestCase):
def test_get_default_config(self):
ext = Extension()
config = ext.get_default_config()
self.assertIn('[{{ cookiecutter.ext_name }}]', config)
self.assertIn('enabled = true', config)
def test_get_config_schema(self):
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#self.assertIn('username', schema)
#self.assertIn('password', schema)
# TODO Write more tests
|
2aab542cc74fdc0cf060518241f01fd74d91ecb5 | byceps/services/user/transfer/models.py | byceps/services/user/transfer/models.py | """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
| """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
| Fix type hint for avatar URL in user DTO | Fix type hint for avatar URL in user DTO
| Python | bsd-3-clause | m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps | """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
Fix type hint for avatar URL in user DTO | """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
| <commit_before>"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
<commit_msg>Fix type hint for avatar URL in user DTO<commit_after> | """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
| """
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
Fix type hint for avatar URL in user DTO"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
| <commit_before>"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: str
is_orga: bool
<commit_msg>Fix type hint for avatar URL in user DTO<commit_after>"""
byceps.services.user.transfer.models
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import Optional
from attr import attrs
from ....typing import UserID
@attrs(auto_attribs=True, frozen=True, slots=True)
class User:
id: UserID
screen_name: str
suspended: bool
deleted: bool
avatar_url: Optional[str]
is_orga: bool
|
1c4e75c243205ad47cd2e47a3d836c6c9e516db4 | pika/amqp_object.py | pika/amqp_object.py | """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
| """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
INDEX = None
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
synchronous = False
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
| Add a few base attributes | Add a few base attributes
| Python | bsd-3-clause | reddec/pika,shinji-s/pika,fkarb/pika-python3,renshawbay/pika-python3,zixiliuyue/pika,skftn/pika,vitaly-krugl/pika,vrtsystems/pika,hugoxia/pika,knowsis/pika,Tarsbot/pika,Zephor5/pika,jstnlef/pika,benjamin9999/pika,pika/pika | """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
Add a few base attributes | """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
INDEX = None
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
synchronous = False
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
| <commit_before>"""Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
<commit_msg>Add a few base attributes<commit_after> | """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
INDEX = None
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
synchronous = False
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
| """Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
Add a few base attributes"""Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
INDEX = None
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
synchronous = False
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
| <commit_before>"""Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
<commit_msg>Add a few base attributes<commit_after>"""Base classes that are extended by low level AMQP frames and higher level
AMQP classes and methods.
"""
class AMQPObject(object):
"""Base object that is extended by AMQP low level frames and AMQP classes
and methods.
"""
NAME = 'AMQPObject'
INDEX = None
def __repr__(self):
items = list()
for key, value in self.__dict__.iteritems():
if getattr(self.__class__, key, None) != value:
items.append('%s=%s' % (key, value))
if not items:
return "<%s>" % self.NAME
return "<%s(%s)>" % (self.NAME, items)
class Class(AMQPObject):
"""Is extended by AMQP classes"""
NAME = 'Unextended Class'
class Method(AMQPObject):
"""Is extended by AMQP methods"""
NAME = 'Unextended Method'
synchronous = False
def _set_content(self, properties, body):
"""If the method is a content frame, set the properties and body to
be carried as attributes of the class.
:param pika.frame.Properties properties: AMQP Basic Properties
:param str|unicode body: The message body
"""
self._properties = properties
self._body = body
def get_properties(self):
"""Return the properties if they are set.
:rtype: pika.frame.Properties
"""
return self._properties
def get_body(self):
"""Return the message body if it is set.
:rtype: str|unicode
"""
return self._body
class Properties(AMQPObject):
"""Class to encompass message properties (AMQP Basic.Properties)"""
NAME = 'Unextended Properties'
|
5830f5590ed185116dd4807f6351ad3afeb0dd5d | plugins/postgres/dbt/adapters/postgres/relation.py | plugins/postgres/dbt/adapters/postgres/relation.py | from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (
self.identifier is not None
and self.type is not None
and len(self.identifier) > self.relation_max_name_length()
):
raise RuntimeException(
f"Postgres relation name '{self.identifier}' is longer than "
f"{self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
| from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (self.identifier is not None and self.type is not None and
len(self.identifier) > self.relation_max_name_length()):
raise RuntimeException(
f"Relation name '{self.identifier}' "
f"is longer than {self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
| Tweak error message, reformat for flake8 | Tweak error message, reformat for flake8
| Python | apache-2.0 | analyst-collective/dbt,analyst-collective/dbt | from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (
self.identifier is not None
and self.type is not None
and len(self.identifier) > self.relation_max_name_length()
):
raise RuntimeException(
f"Postgres relation name '{self.identifier}' is longer than "
f"{self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
Tweak error message, reformat for flake8 | from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (self.identifier is not None and self.type is not None and
len(self.identifier) > self.relation_max_name_length()):
raise RuntimeException(
f"Relation name '{self.identifier}' "
f"is longer than {self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
| <commit_before>from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (
self.identifier is not None
and self.type is not None
and len(self.identifier) > self.relation_max_name_length()
):
raise RuntimeException(
f"Postgres relation name '{self.identifier}' is longer than "
f"{self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
<commit_msg>Tweak error message, reformat for flake8<commit_after> | from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (self.identifier is not None and self.type is not None and
len(self.identifier) > self.relation_max_name_length()):
raise RuntimeException(
f"Relation name '{self.identifier}' "
f"is longer than {self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
| from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (
self.identifier is not None
and self.type is not None
and len(self.identifier) > self.relation_max_name_length()
):
raise RuntimeException(
f"Postgres relation name '{self.identifier}' is longer than "
f"{self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
Tweak error message, reformat for flake8from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (self.identifier is not None and self.type is not None and
len(self.identifier) > self.relation_max_name_length()):
raise RuntimeException(
f"Relation name '{self.identifier}' "
f"is longer than {self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
| <commit_before>from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (
self.identifier is not None
and self.type is not None
and len(self.identifier) > self.relation_max_name_length()
):
raise RuntimeException(
f"Postgres relation name '{self.identifier}' is longer than "
f"{self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
<commit_msg>Tweak error message, reformat for flake8<commit_after>from dbt.adapters.base import Column
from dataclasses import dataclass
from dbt.adapters.base.relation import BaseRelation
from dbt.exceptions import RuntimeException
@dataclass(frozen=True, eq=False, repr=False)
class PostgresRelation(BaseRelation):
def __post_init__(self):
# Check for length of Postgres table/view names.
# Check self.type to exclude test relation identifiers
if (self.identifier is not None and self.type is not None and
len(self.identifier) > self.relation_max_name_length()):
raise RuntimeException(
f"Relation name '{self.identifier}' "
f"is longer than {self.relation_max_name_length()} characters"
)
def relation_max_name_length(self):
return 63
class PostgresColumn(Column):
@property
def data_type(self):
# on postgres, do not convert 'text' to 'varchar()'
if self.dtype.lower() == 'text':
return self.dtype
return super().data_type
|
d0861d4c072e81c94c06b093469224c076c6c362 | armstrong/core/arm_sections/tests/_utils.py | armstrong/core/arm_sections/tests/_utils.py | from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class ArmSectionsTestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
| from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class TestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
# TODO: Refactor the test cases to remove ArmSectionsTestCase
ArmSectionsTestCase = TestCase
| Adjust in a BC way to introduce TestCase | Adjust in a BC way to introduce TestCase
| Python | apache-2.0 | texastribune/armstrong.core.tt_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections,texastribune/armstrong.core.tt_sections,armstrong/armstrong.core.arm_sections | from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class ArmSectionsTestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
Adjust in a BC way to introduce TestCase | from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class TestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
# TODO: Refactor the test cases to remove ArmSectionsTestCase
ArmSectionsTestCase = TestCase
| <commit_before>from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class ArmSectionsTestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
<commit_msg>Adjust in a BC way to introduce TestCase<commit_after> | from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class TestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
# TODO: Refactor the test cases to remove ArmSectionsTestCase
ArmSectionsTestCase = TestCase
| from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class ArmSectionsTestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
Adjust in a BC way to introduce TestCasefrom datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class TestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
# TODO: Refactor the test cases to remove ArmSectionsTestCase
ArmSectionsTestCase = TestCase
| <commit_before>from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class ArmSectionsTestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
<commit_msg>Adjust in a BC way to introduce TestCase<commit_after>from datetime import datetime
from django.core.files import File
from django.conf import settings
from armstrong.dev.tests.utils import ArmstrongTestCase
from armstrong.dev.tests.utils.backports import *
from armstrong.dev.tests.utils.concrete import *
from armstrong.dev.tests.utils.users import *
from ..models import Section
import fudge
class TestCase(ArmstrongTestCase):
def setUp(self):
super(ArmSectionsTestCase, self).setUp()
self.sections = []
data = [
('Local', 'local', 'All about local', None),
('Sports', 'sports', 'All about sports', None),
('College', 'college', 'All about college sports', 1),
('Pro', 'pro', 'All about pro sports', 1),
('US', 'us', 'All about US sports', 3),
('Weather', 'weather', 'All about weather', None),
]
for title, slug, summary, parent in data:
if parent is not None:
parent = self.sections[parent]
self.sections.append(Section.objects.create(
title=title,
slug=slug,
summary=summary,
parent=parent,
))
# TODO: Refactor the test cases to remove ArmSectionsTestCase
ArmSectionsTestCase = TestCase
|
1007ee608ac448941e9bb8958c6b2308abeb63ef | github-listener/listener.py | github-listener/listener.py | import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
| import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
if flask.request.headers['X-GitHub-Event'] == 'ping':
return flask.jsonify(zen_level="super")
if flask.request.headers['X-GitHub-Event'] == 'push':
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
| Add support for github ping event | Add support for github ping event
| Python | mit | Storj/lunchbox,Storj/lunchbox | import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
Add support for github ping event | import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
if flask.request.headers['X-GitHub-Event'] == 'ping':
return flask.jsonify(zen_level="super")
if flask.request.headers['X-GitHub-Event'] == 'push':
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
| <commit_before>import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
<commit_msg>Add support for github ping event<commit_after> | import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
if flask.request.headers['X-GitHub-Event'] == 'ping':
return flask.jsonify(zen_level="super")
if flask.request.headers['X-GitHub-Event'] == 'push':
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
| import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
Add support for github ping eventimport flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
if flask.request.headers['X-GitHub-Event'] == 'ping':
return flask.jsonify(zen_level="super")
if flask.request.headers['X-GitHub-Event'] == 'push':
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
| <commit_before>import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
<commit_msg>Add support for github ping event<commit_after>import flask
import registry
app = flask.Flask(__name__)
def get_registry():
r = getattr(flask.g, '_registry', None)
if r is None:
r = flask.g._registry = registry.Registry("hooks.json")
return r
@app.route("/hook/<repo>", methods=['POST'])
def hook(repo):
if flask.request.headers['X-GitHub-Event'] == 'ping':
return flask.jsonify(zen_level="super")
if flask.request.headers['X-GitHub-Event'] == 'push':
branch = flask.request.json["ref"].split("/")[-1]
sha = flask.request.json["head"]
get_registry().notify(repo, branch, sha)
return flask.jsonify(status="ok")
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=8080)
|
82155f3caad1220eeb2ee718142c5aace8600f87 | django-jquery-file-upload/urls.py | django-jquery-file-upload/urls.py | from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
| from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from os.path import join, abspath, dirname
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': join(abspath(dirname(dirname(__file__))), 'media')}),
)
| Fix serve media files path | Fix serve media files path
| Python | mit | minhlongdo/django-jquery-file-upload,Imaginashion/cloud-vision,vaniakov/django-jquery-file-upload,indrajithi/mgc-django,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,minhlongdo/django-jquery-file-upload,Imaginashion/cloud-vision,vaniakov/django-jquery-file-upload,Imaginashion/cloud-vision,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,vaniakov/django-jquery-file-upload,indrajithi/mgc-django,sigurdga/django-jquery-file-upload,Imaginashion/cloud-vision,minhlongdo/django-jquery-file-upload | from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
Fix serve media files path | from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from os.path import join, abspath, dirname
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': join(abspath(dirname(dirname(__file__))), 'media')}),
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
<commit_msg>Fix serve media files path<commit_after> | from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from os.path import join, abspath, dirname
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': join(abspath(dirname(dirname(__file__))), 'media')}),
)
| from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
Fix serve media files pathfrom django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from os.path import join, abspath, dirname
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': join(abspath(dirname(dirname(__file__))), 'media')}),
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
import os
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': os.path.join(os.path.abspath(os.path.dirname(__file__)), 'media')}),
)
<commit_msg>Fix serve media files path<commit_after>from django.conf.urls import patterns, include, url
from django.http import HttpResponseRedirect
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'upload.views.home', name='home'),
url(r'^$', lambda x: HttpResponseRedirect('/upload/new/')),
url(r'^upload/', include('fileupload.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
)
from os.path import join, abspath, dirname
urlpatterns += patterns('',
(r'^media/(.*)$', 'django.views.static.serve', {'document_root': join(abspath(dirname(dirname(__file__))), 'media')}),
)
|
67ab4b7374d739719700f84f0f5726f1b0c476d8 | cybox/test/objects/mutex_test.py | cybox/test/objects/mutex_test.py | import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
| import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
| Fix some more unittest assert methods for Python 2.6 | Fix some more unittest assert methods for Python 2.6
| Python | bsd-3-clause | CybOXProject/python-cybox | import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
Fix some more unittest assert methods for Python 2.6 | import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix some more unittest assert methods for Python 2.6<commit_after> | import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
| import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
Fix some more unittest assert methods for Python 2.6import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_obj, mutex_binding.MutexObjectType)
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertIsInstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType)
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertIsInstance(self.mutex_dict, dict)
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix some more unittest assert methods for Python 2.6<commit_after>import unittest
import cybox.bindings.cybox_common_types_1_0 as common_types_binding
import cybox.bindings.mutex_object_1_3 as mutex_binding
from cybox.objects.mutex_object import Mutex
class MutexTest(unittest.TestCase):
def setUp(self):
self.test_dict = {'named': True, 'name': {'value': 'test_name'}}
self.mutex_obj = Mutex.object_from_dict(self.test_dict)
self.mutex_dict = Mutex.dict_from_object(self.mutex_obj)
def test_obj_from_dict(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_obj,
mutex_binding.MutexObjectType))
#Test the named attribute
self.assertEqual(self.mutex_obj.get_named(), True)
#Test the name element
self.assertTrue(isinstance(self.mutex_obj.get_Name(),
common_types_binding.StringObjectAttributeType))
self.assertEqual(self.mutex_obj.get_Name().get_valueOf_(), 'test_name')
def test_dict_from_obj(self):
#Make sure it's an instance of the right class
self.assertTrue(isinstance(self.mutex_dict, dict))
#Test the dictionary values
self.assertEqual(self.mutex_dict['named'], True)
self.assertEqual(self.mutex_dict['name']['value'], 'test_name')
if __name__ == "__main__":
unittest.main()
|
02937272206a526ff62b164fc54a14c385eb6970 | common/lib/xmodule/xmodule/hidden_module.py | common/lib/xmodule/xmodule/hidden_module.py | from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return "ERROR: This module is unknown--students will not see it at all"
else:
return ""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
| from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
| Return unicode string to pass assertion | hidden-module-unicode: Return unicode string to pass assertion
XBlock Fragments expect unicode strings, and fail on an assertion when
it isn't:
```
2013-11-14 07:55:50,774 ERROR 3788 [django.request] base.py:215 - Internal Server Error: /courses/TestU/TST101/now/courseware/41d55c576a574fde99319420228f7f88/5fef5794e34842f4a2d45ebcdeaa9a3a/
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 20, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/utils/decorators.py", line 91, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 75, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/edx/app/edxapp/edx-platform/lms/djangoapps/courseware/views.py", line 407, in index
context['fragment'] = section_module.render('student_view')
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/seq_module.py", line 77, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/vertical_module.py", line 27, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 464, in student_view
return Fragment(self.get_html())
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 34, in __init__
self.add_content(content)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 71, in add_content
assert isinstance(content, unicode)
AssertionError
```
| Python | agpl-3.0 | louyihua/edx-platform,Endika/edx-platform,mushtaqak/edx-platform,shubhdev/openedx,valtech-mooc/edx-platform,a-parhom/edx-platform,Livit/Livit.Learn.EdX,longmen21/edx-platform,knehez/edx-platform,shurihell/testasia,mbareta/edx-platform-ft,hkawasaki/kawasaki-aio8-1,Softmotions/edx-platform,kursitet/edx-platform,dsajkl/reqiop,xingyepei/edx-platform,hamzehd/edx-platform,pelikanchik/edx-platform,Edraak/circleci-edx-platform,cpennington/edx-platform,shubhdev/edxOnBaadal,rismalrv/edx-platform,JCBarahona/edX,dcosentino/edx-platform,angelapper/edx-platform,wwj718/edx-platform,polimediaupv/edx-platform,mitocw/edx-platform,etzhou/edx-platform,ampax/edx-platform-backup,xinjiguaike/edx-platform,TsinghuaX/edx-platform,jolyonb/edx-platform,ferabra/edx-platform,atsolakid/edx-platform,romain-li/edx-platform,shashank971/edx-platform,MSOpenTech/edx-platform,y12uc231/edx-platform,gymnasium/edx-platform,shurihell/testasia,polimediaupv/edx-platform,auferack08/edx-platform,antonve/s4-project-mooc,tanmaykm/edx-platform,ahmedaljazzar/edx-platform,beni55/edx-platform,abdoosh00/edx-rtl-final,chand3040/cloud_that,nttks/edx-platform,procangroup/edx-platform,doganov/edx-platform,jamesblunt/edx-platform,xinjiguaike/edx-platform,mahendra-r/edx-platform,Softmotions/edx-platform,BehavioralInsightsTeam/edx-platform,inares/edx-platform,CredoReference/edx-platform,yokose-ks/edx-platform,Semi-global/edx-platform,4eek/edx-platform,nikolas/edx-platform,knehez/edx-platform,franosincic/edx-platform,knehez/edx-platform,nttks/jenkins-test,dcosentino/edx-platform,naresh21/synergetics-edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,playm2mboy/edx-platform,eestay/edx-platform,jbassen/edx-platform,mtlchun/edx,jbzdak/edx-platform,msegado/edx-platform,CredoReference/edx-platform,zofuthan/edx-platform,eduNEXT/edx-platform,xingyepei/edx-platform,stvstnfrd/edx-platform,Edraak/edraak-platform,Kalyzee/edx-platform,Unow/edx-platform,y12uc231/edx-platform,OmarIthawi/edx-platform,waheeda
hmed/edx-platform,jazkarta/edx-platform,teltek/edx-platform,chauhanhardik/populo_2,martynovp/edx-platform,UOMx/edx-platform,DNFcode/edx-platform,jamiefolsom/edx-platform,naresh21/synergetics-edx-platform,defance/edx-platform,benpatterson/edx-platform,beni55/edx-platform,wwj718/ANALYSE,xuxiao19910803/edx,DefyVentures/edx-platform,Stanford-Online/edx-platform,cselis86/edx-platform,beacloudgenius/edx-platform,simbs/edx-platform,xuxiao19910803/edx-platform,jamesblunt/edx-platform,Softmotions/edx-platform,mjirayu/sit_academy,martynovp/edx-platform,bdero/edx-platform,hmcmooc/muddx-platform,hkawasaki/kawasaki-aio8-1,fly19890211/edx-platform,Edraak/edraak-platform,dkarakats/edx-platform,bdero/edx-platform,edx/edx-platform,torchingloom/edx-platform,knehez/edx-platform,pabloborrego93/edx-platform,DefyVentures/edx-platform,benpatterson/edx-platform,xuxiao19910803/edx,teltek/edx-platform,IONISx/edx-platform,marcore/edx-platform,Stanford-Online/edx-platform,rhndg/openedx,UXE/local-edx,kmoocdev/edx-platform,kxliugang/edx-platform,jonathan-beard/edx-platform,dcosentino/edx-platform,eduNEXT/edx-platform,kmoocdev2/edx-platform,amir-qayyum-khan/edx-platform,tiagochiavericosta/edx-platform,J861449197/edx-platform,amir-qayyum-khan/edx-platform,playm2mboy/edx-platform,cyanna/edx-platform,TeachAtTUM/edx-platform,jzoldak/edx-platform,ZLLab-Mooc/edx-platform,shubhdev/edx-platform,Semi-global/edx-platform,Edraak/edx-platform,ahmedaljazzar/edx-platform,hkawasaki/kawasaki-aio8-0,hastexo/edx-platform,Shrhawk/edx-platform,LearnEra/LearnEraPlaftform,leansoft/edx-platform,ESOedX/edx-platform,nanolearning/edx-platform,shurihell/testasia,shubhdev/edx-platform,Endika/edx-platform,lduarte1991/edx-platform,ferabra/edx-platform,MakeHer/edx-platform,bitifirefly/edx-platform,romain-li/edx-platform,kamalx/edx-platform,Semi-global/edx-platform,louyihua/edx-platform,hkawasaki/kawasaki-aio8-0,abdoosh00/edraak,utecuy/edx-platform,mjirayu/sit_academy,dcosentino/edx-platform,arbrandes/edx-platform,LICEF/edx-pla
tform,beni55/edx-platform,rismalrv/edx-platform,shubhdev/edxOnBaadal,fly19890211/edx-platform,miptliot/edx-platform,vikas1885/test1,torchingloom/edx-platform,shabab12/edx-platform,yokose-ks/edx-platform,hkawasaki/kawasaki-aio8-2,sameetb-cuelogic/edx-platform-test,tiagochiavericosta/edx-platform,antoviaque/edx-platform,kxliugang/edx-platform,dsajkl/reqiop,jzoldak/edx-platform,wwj718/edx-platform,antonve/s4-project-mooc,mcgachey/edx-platform,ZLLab-Mooc/edx-platform,shubhdev/edxOnBaadal,bigdatauniversity/edx-platform,pepeportela/edx-platform,abdoosh00/edx-rtl-final,vasyarv/edx-platform,jswope00/griffinx,eemirtekin/edx-platform,inares/edx-platform,TeachAtTUM/edx-platform,4eek/edx-platform,abdoosh00/edx-rtl-final,jelugbo/tundex,nttks/edx-platform,olexiim/edx-platform,adoosii/edx-platform,Edraak/edx-platform,Ayub-Khan/edx-platform,Softmotions/edx-platform,halvertoluke/edx-platform,jbzdak/edx-platform,jazztpt/edx-platform,WatanabeYasumasa/edx-platform,mtlchun/edx,LICEF/edx-platform,jazztpt/edx-platform,vasyarv/edx-platform,shubhdev/edx-platform,CourseTalk/edx-platform,hkawasaki/kawasaki-aio8-0,eduNEXT/edunext-platform,rue89-tech/edx-platform,dsajkl/123,AkA84/edx-platform,utecuy/edx-platform,ubc/edx-platform,Shrhawk/edx-platform,hmcmooc/muddx-platform,mjirayu/sit_academy,edx/edx-platform,devs1991/test_edx_docmode,ubc/edx-platform,shabab12/edx-platform,nagyistoce/edx-platform,nanolearningllc/edx-platform-cypress-2,yokose-ks/edx-platform,playm2mboy/edx-platform,angelapper/edx-platform,jonathan-beard/edx-platform,nanolearningllc/edx-platform-cypress,amir-qayyum-khan/edx-platform,msegado/edx-platform,motion2015/edx-platform,dsajkl/123,hastexo/edx-platform,cecep-edu/edx-platform,xuxiao19910803/edx,caesar2164/edx-platform,vasyarv/edx-platform,AkA84/edx-platform,dkarakats/edx-platform,AkA84/edx-platform,bigdatauniversity/edx-platform,10clouds/edx-platform,JCBarahona/edX,morenopc/edx-platform,Ayub-Khan/edx-platform,jazkarta/edx-platform,nagyistoce/edx-platform,hamzehd/edx-platform,
Lektorium-LLC/edx-platform,xuxiao19910803/edx-platform,jazkarta/edx-platform,B-MOOC/edx-platform,analyseuc3m/ANALYSE-v1,cecep-edu/edx-platform,TsinghuaX/edx-platform,sameetb-cuelogic/edx-platform-test,philanthropy-u/edx-platform,mtlchun/edx,jbzdak/edx-platform,mitocw/edx-platform,mitocw/edx-platform,devs1991/test_edx_docmode,unicri/edx-platform,utecuy/edx-platform,playm2mboy/edx-platform,kmoocdev2/edx-platform,mahendra-r/edx-platform,gymnasium/edx-platform,fintech-circle/edx-platform,hamzehd/edx-platform,chauhanhardik/populo_2,ovnicraft/edx-platform,gsehub/edx-platform,kmoocdev/edx-platform,shashank971/edx-platform,nanolearningllc/edx-platform-cypress,Stanford-Online/edx-platform,MSOpenTech/edx-platform,TeachAtTUM/edx-platform,ovnicraft/edx-platform,IndonesiaX/edx-platform,solashirai/edx-platform,antoviaque/edx-platform,polimediaupv/edx-platform,arifsetiawan/edx-platform,iivic/BoiseStateX,deepsrijit1105/edx-platform,mjg2203/edx-platform-seas,Ayub-Khan/edx-platform,mitocw/edx-platform,jruiperezv/ANALYSE,appliedx/edx-platform,nanolearning/edx-platform,fintech-circle/edx-platform,WatanabeYasumasa/edx-platform,chrisndodge/edx-platform,UOMx/edx-platform,longmen21/edx-platform,pomegranited/edx-platform,nanolearningllc/edx-platform-cypress,zhenzhai/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,rhndg/openedx,zerobatu/edx-platform,openfun/edx-platform,unicri/edx-platform,motion2015/a3,zerobatu/edx-platform,iivic/BoiseStateX,MakeHer/edx-platform,leansoft/edx-platform,olexiim/edx-platform,ampax/edx-platform,kursitet/edx-platform,JCBarahona/edX,jjmiranda/edx-platform,motion2015/a3,cognitiveclass/edx-platform,IONISx/edx-platform,shurihell/testasia,JCBarahona/edX,andyzsf/edx,martynovp/edx-platform,peterm-itr/edx-platform,beacloudgenius/edx-platform,edry/edx-platform,nttks/edx-platform,UXE/local-edx,tiagochiavericosta/edx-platform,edx-solutions/edx-platform,mushtaqak/edx-platform,Livit/Livit.Learn.EdX,pomegranited/edx-platform,openfun/edx-platform,chrisndodge/edx-platf
orm,devs1991/test_edx_docmode,chudaol/edx-platform,zhenzhai/edx-platform,chand3040/cloud_that,Edraak/edx-platform,ahmadio/edx-platform,Kalyzee/edx-platform,rismalrv/edx-platform,ESOedX/edx-platform,abdoosh00/edraak,mcgachey/edx-platform,doganov/edx-platform,zofuthan/edx-platform,analyseuc3m/ANALYSE-v1,ferabra/edx-platform,nagyistoce/edx-platform,xingyepei/edx-platform,jamiefolsom/edx-platform,benpatterson/edx-platform,tiagochiavericosta/edx-platform,leansoft/edx-platform,leansoft/edx-platform,olexiim/edx-platform,raccoongang/edx-platform,leansoft/edx-platform,gsehub/edx-platform,eestay/edx-platform,B-MOOC/edx-platform,nikolas/edx-platform,nttks/edx-platform,devs1991/test_edx_docmode,zadgroup/edx-platform,jazkarta/edx-platform-for-isc,edry/edx-platform,LearnEra/LearnEraPlaftform,kmoocdev/edx-platform,angelapper/edx-platform,jelugbo/tundex,vismartltd/edx-platform,CourseTalk/edx-platform,itsjeyd/edx-platform,morenopc/edx-platform,carsongee/edx-platform,vikas1885/test1,JioEducation/edx-platform,LearnEra/LearnEraPlaftform,chudaol/edx-platform,vismartltd/edx-platform,torchingloom/edx-platform,ak2703/edx-platform,zofuthan/edx-platform,defance/edx-platform,arbrandes/edx-platform,Livit/Livit.Learn.EdX,sudheerchintala/LearnEraPlatForm,kxliugang/edx-platform,solashirai/edx-platform,OmarIthawi/edx-platform,alexthered/kienhoc-platform,zhenzhai/edx-platform,longmen21/edx-platform,pku9104038/edx-platform,kamalx/edx-platform,jolyonb/edx-platform,edx-solutions/edx-platform,pepeportela/edx-platform,zofuthan/edx-platform,adoosii/edx-platform,hmcmooc/muddx-platform,ahmadiga/min_edx,RPI-OPENEDX/edx-platform,RPI-OPENEDX/edx-platform,jbzdak/edx-platform,doismellburning/edx-platform,MakeHer/edx-platform,don-github/edx-platform,DefyVentures/edx-platform,zerobatu/edx-platform,etzhou/edx-platform,dsajkl/123,mbareta/edx-platform-ft,LICEF/edx-platform,motion2015/edx-platform,defance/edx-platform,mcgachey/edx-platform,B-MOOC/edx-platform,WatanabeYasumasa/edx-platform,pelikanchik/edx-platform,nan
olearningllc/edx-platform-cypress,simbs/edx-platform,nikolas/edx-platform,cpennington/edx-platform,JioEducation/edx-platform,morenopc/edx-platform,itsjeyd/edx-platform,Ayub-Khan/edx-platform,y12uc231/edx-platform,ubc/edx-platform,ahmadio/edx-platform,CredoReference/edx-platform,nttks/jenkins-test,eemirtekin/edx-platform,cselis86/edx-platform,kamalx/edx-platform,mjirayu/sit_academy,iivic/BoiseStateX,teltek/edx-platform,valtech-mooc/edx-platform,martynovp/edx-platform,ZLLab-Mooc/edx-platform,wwj718/ANALYSE,ovnicraft/edx-platform,SivilTaram/edx-platform,Ayub-Khan/edx-platform,motion2015/edx-platform,BehavioralInsightsTeam/edx-platform,eduNEXT/edunext-platform,apigee/edx-platform,xuxiao19910803/edx-platform,pepeportela/edx-platform,devs1991/test_edx_docmode,hastexo/edx-platform,J861449197/edx-platform,hkawasaki/kawasaki-aio8-2,kmoocdev2/edx-platform,jswope00/GAI,shubhdev/edx-platform,a-parhom/edx-platform,MakeHer/edx-platform,hamzehd/edx-platform,4eek/edx-platform,proversity-org/edx-platform,proversity-org/edx-platform,kamalx/edx-platform,appsembler/edx-platform,arifsetiawan/edx-platform,procangroup/edx-platform,openfun/edx-platform,shashank971/edx-platform,etzhou/edx-platform,kxliugang/edx-platform,etzhou/edx-platform,chand3040/cloud_that,shubhdev/openedx,jazkarta/edx-platform,rue89-tech/edx-platform,martynovp/edx-platform,mjg2203/edx-platform-seas,jruiperezv/ANALYSE,fintech-circle/edx-platform,LearnEra/LearnEraPlaftform,motion2015/a3,appliedx/edx-platform,ahmadiga/min_edx,playm2mboy/edx-platform,Edraak/circleci-edx-platform,pelikanchik/edx-platform,edry/edx-platform,jbzdak/edx-platform,wwj718/edx-platform,jazztpt/edx-platform,pomegranited/edx-platform,ampax/edx-platform,IONISx/edx-platform,hmcmooc/muddx-platform,jamiefolsom/edx-platform,antonve/s4-project-mooc,rhndg/openedx,rue89-tech/edx-platform,hkawasaki/kawasaki-aio8-1,deepsrijit1105/edx-platform,Endika/edx-platform,fly19890211/edx-platform,openfun/edx-platform,jolyonb/edx-platform,pku9104038/edx-platform,Watanabe
Yasumasa/edx-platform,vikas1885/test1,franosincic/edx-platform,cyanna/edx-platform,doganov/edx-platform,jazztpt/edx-platform,ahmadio/edx-platform,wwj718/ANALYSE,MSOpenTech/edx-platform,mtlchun/edx,ahmadiga/min_edx,itsjeyd/edx-platform,zadgroup/edx-platform,jazztpt/edx-platform,ubc/edx-platform,fintech-circle/edx-platform,TsinghuaX/edx-platform,xinjiguaike/edx-platform,msegado/edx-platform,stvstnfrd/edx-platform,rue89-tech/edx-platform,alexthered/kienhoc-platform,apigee/edx-platform,Semi-global/edx-platform,Unow/edx-platform,jjmiranda/edx-platform,msegado/edx-platform,chudaol/edx-platform,nanolearningllc/edx-platform-cypress-2,prarthitm/edxplatform,don-github/edx-platform,4eek/edx-platform,xingyepei/edx-platform,alu042/edx-platform,SivilTaram/edx-platform,tanmaykm/edx-platform,alexthered/kienhoc-platform,doismellburning/edx-platform,fly19890211/edx-platform,atsolakid/edx-platform,cognitiveclass/edx-platform,morenopc/edx-platform,zubair-arbi/edx-platform,shubhdev/edxOnBaadal,adoosii/edx-platform,mbareta/edx-platform-ft,UOMx/edx-platform,jruiperezv/ANALYSE,bigdatauniversity/edx-platform,jazkarta/edx-platform-for-isc,pabloborrego93/edx-platform,eestay/edx-platform,jswope00/GAI,simbs/edx-platform,sudheerchintala/LearnEraPlatForm,vismartltd/edx-platform,amir-qayyum-khan/edx-platform,mahendra-r/edx-platform,rismalrv/edx-platform,halvertoluke/edx-platform,pabloborrego93/edx-platform,analyseuc3m/ANALYSE-v1,nagyistoce/edx-platform,waheedahmed/edx-platform,Semi-global/edx-platform,kursitet/edx-platform,ak2703/edx-platform,gymnasium/edx-platform,ahmadio/edx-platform,a-parhom/edx-platform,ak2703/edx-platform,caesar2164/edx-platform,jamesblunt/edx-platform,tiagochiavericosta/edx-platform,synergeticsedx/deployment-wipro,mjirayu/sit_academy,polimediaupv/edx-platform,cyanna/edx-platform,louyihua/edx-platform,mcgachey/edx-platform,eemirtekin/edx-platform,jbassen/edx-platform,carsongee/edx-platform,ampax/edx-platform-backup,valtech-mooc/edx-platform,procangroup/edx-platform,halvertolu
ke/edx-platform,shashank971/edx-platform,cyanna/edx-platform,andyzsf/edx,prarthitm/edxplatform,edry/edx-platform,don-github/edx-platform,vikas1885/test1,ovnicraft/edx-platform,IndonesiaX/edx-platform,eemirtekin/edx-platform,tanmaykm/edx-platform,sudheerchintala/LearnEraPlatForm,jbassen/edx-platform,eduNEXT/edx-platform,torchingloom/edx-platform,prarthitm/edxplatform,iivic/BoiseStateX,bigdatauniversity/edx-platform,dsajkl/123,jjmiranda/edx-platform,atsolakid/edx-platform,arifsetiawan/edx-platform,waheedahmed/edx-platform,peterm-itr/edx-platform,arifsetiawan/edx-platform,SravanthiSinha/edx-platform,gymnasium/edx-platform,ampax/edx-platform,RPI-OPENEDX/edx-platform,CourseTalk/edx-platform,eduNEXT/edx-platform,Edraak/edraak-platform,OmarIthawi/edx-platform,cecep-edu/edx-platform,ahmadiga/min_edx,shubhdev/openedx,bitifirefly/edx-platform,TsinghuaX/edx-platform,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-1,shurihell/testasia,kursitet/edx-platform,shubhdev/openedx,hkawasaki/kawasaki-aio8-2,IndonesiaX/edx-platform,edx-solutions/edx-platform,ahmadio/edx-platform,AkA84/edx-platform,SravanthiSinha/edx-platform,mahendra-r/edx-platform,zubair-arbi/edx-platform,JioEducation/edx-platform,J861449197/edx-platform,SravanthiSinha/edx-platform,beacloudgenius/edx-platform,DNFcode/edx-platform,marcore/edx-platform,nanolearningllc/edx-platform-cypress-2,BehavioralInsightsTeam/edx-platform,dsajkl/reqiop,jjmiranda/edx-platform,hkawasaki/kawasaki-aio8-2,appliedx/edx-platform,shubhdev/openedx,miptliot/edx-platform,RPI-OPENEDX/edx-platform,mjg2203/edx-platform-seas,SivilTaram/edx-platform,chauhanhardik/populo,zerobatu/edx-platform,peterm-itr/edx-platform,alu042/edx-platform,eemirtekin/edx-platform,openfun/edx-platform,sameetb-cuelogic/edx-platform-test,valtech-mooc/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx-platform,bdero/edx-platform,antoviaque/edx-platform,apigee/edx-platform,zubair-arbi/edx-platform,vismartltd/edx-platform,SravanthiSinha/edx-platform,ampax/edx-platform,jzo
ldak/edx-platform,pomegranited/edx-platform,mahendra-r/edx-platform,jswope00/GAI,kmoocdev/edx-platform,jswope00/griffinx,tanmaykm/edx-platform,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,pepeportela/edx-platform,appsembler/edx-platform,atsolakid/edx-platform,ferabra/edx-platform,torchingloom/edx-platform,pku9104038/edx-platform,synergeticsedx/deployment-wipro,Softmotions/edx-platform,chauhanhardik/populo,cognitiveclass/edx-platform,edx/edx-platform,doganov/edx-platform,UXE/local-edx,jelugbo/tundex,yokose-ks/edx-platform,louyihua/edx-platform,jbassen/edx-platform,solashirai/edx-platform,EDUlib/edx-platform,ovnicraft/edx-platform,synergeticsedx/deployment-wipro,benpatterson/edx-platform,knehez/edx-platform,DefyVentures/edx-platform,Lektorium-LLC/edx-platform,zhenzhai/edx-platform,pku9104038/edx-platform,atsolakid/edx-platform,solashirai/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,jswope00/griffinx,waheedahmed/edx-platform,mtlchun/edx,nttks/jenkins-test,cselis86/edx-platform,auferack08/edx-platform,marcore/edx-platform,dkarakats/edx-platform,procangroup/edx-platform,naresh21/synergetics-edx-platform,zofuthan/edx-platform,nanolearning/edx-platform,longmen21/edx-platform,JCBarahona/edX,DNFcode/edx-platform,mushtaqak/edx-platform,utecuy/edx-platform,cecep-edu/edx-platform,adoosii/edx-platform,J861449197/edx-platform,auferack08/edx-platform,jazkarta/edx-platform-for-isc,sudheerchintala/LearnEraPlatForm,benpatterson/edx-platform,stvstnfrd/edx-platform,Kalyzee/edx-platform,inares/edx-platform,jonathan-beard/edx-platform,xuxiao19910803/edx,rue89-tech/edx-platform,ubc/edx-platform,appsembler/edx-platform,J861449197/edx-platform,chudaol/edx-platform,nanolearning/edx-platform,chand3040/cloud_that,carsongee/edx-platform,nttks/edx-platform,defance/edx-platform,caesar2164/edx-platform,wwj718/edx-platform,kursitet/edx-platform,motion2015/edx-platform,appliedx/edx-platform,mushtaqak/edx-platform,jswope00/griffinx,MSOpenTech/edx-platform,IONISx/edx-platform,fr
anosincic/edx-platform,doismellburning/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,LICEF/edx-platform,bigdatauniversity/edx-platform,beacloudgenius/edx-platform,beacloudgenius/edx-platform,jamiefolsom/edx-platform,naresh21/synergetics-edx-platform,valtech-mooc/edx-platform,nanolearningllc/edx-platform-cypress-2,xinjiguaike/edx-platform,chudaol/edx-platform,lduarte1991/edx-platform,jonathan-beard/edx-platform,B-MOOC/edx-platform,alu042/edx-platform,kamalx/edx-platform,adoosii/edx-platform,wwj718/ANALYSE,EDUlib/edx-platform,edry/edx-platform,Endika/edx-platform,romain-li/edx-platform,10clouds/edx-platform,dcosentino/edx-platform,lduarte1991/edx-platform,unicri/edx-platform,teltek/edx-platform,RPI-OPENEDX/edx-platform,dsajkl/reqiop,eestay/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,nagyistoce/edx-platform,philanthropy-u/edx-platform,ahmadiga/min_edx,mcgachey/edx-platform,vismartltd/edx-platform,y12uc231/edx-platform,antonve/s4-project-mooc,bitifirefly/edx-platform,CourseTalk/edx-platform,stvstnfrd/edx-platform,alexthered/kienhoc-platform,polimediaupv/edx-platform,miptliot/edx-platform,abdoosh00/edraak,SivilTaram/edx-platform,raccoongang/edx-platform,gsehub/edx-platform,motion2015/edx-platform,Edraak/edx-platform,romain-li/edx-platform,arifsetiawan/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-0,shubhdev/edxOnBaadal,AkA84/edx-platform,morenopc/edx-platform,wwj718/edx-platform,simbs/edx-platform,ZLLab-Mooc/edx-platform,prarthitm/edxplatform,cpennington/edx-platform,xingyepei/edx-platform,jelugbo/tundex,jolyonb/edx-platform,shubhdev/edx-platform,unicri/edx-platform,olexiim/edx-platform,cselis86/edx-platform,EDUlib/edx-platform,marcore/edx-platform,nikolas/edx-platform,Kalyzee/edx-platform,eduNEXT/edunext-platform,ampax/edx-platform-backup,LICEF/edx-platform,IndonesiaX/edx-platform,cpennington/edx-platform,ampax/edx-platform-backup,alu042/edx-platform,antoviaque/edx-platform,DefyVentures/edx-platform,jruiperezv/ANALYSE,UOMx/edx-platform
,Livit/Livit.Learn.EdX,zerobatu/edx-platform,dkarakats/edx-platform,halvertoluke/edx-platform,DNFcode/edx-platform,jamesblunt/edx-platform,pabloborrego93/edx-platform,zadgroup/edx-platform,zubair-arbi/edx-platform,EDUlib/edx-platform,cognitiveclass/edx-platform,fly19890211/edx-platform,sameetb-cuelogic/edx-platform-test,itsjeyd/edx-platform,auferack08/edx-platform,antonve/s4-project-mooc,chand3040/cloud_that,jzoldak/edx-platform,cyanna/edx-platform,mjg2203/edx-platform-seas,Shrhawk/edx-platform,franosincic/edx-platform,IONISx/edx-platform,B-MOOC/edx-platform,miptliot/edx-platform,IndonesiaX/edx-platform,Shrhawk/edx-platform,chauhanhardik/populo_2,bdero/edx-platform,ahmedaljazzar/edx-platform,ZLLab-Mooc/edx-platform,jelugbo/tundex,shabab12/edx-platform,mbareta/edx-platform-ft,etzhou/edx-platform,beni55/edx-platform,Edraak/circleci-edx-platform,mushtaqak/edx-platform,caesar2164/edx-platform,zubair-arbi/edx-platform,zadgroup/edx-platform,DNFcode/edx-platform,10clouds/edx-platform,Kalyzee/edx-platform,lduarte1991/edx-platform,10clouds/edx-platform,kmoocdev2/edx-platform,jazkarta/edx-platform-for-isc,UXE/local-edx,appliedx/edx-platform,vasyarv/edx-platform,motion2015/a3,apigee/edx-platform,chrisndodge/edx-platform,proversity-org/edx-platform,ESOedX/edx-platform,zhenzhai/edx-platform,don-github/edx-platform,edx/edx-platform,philanthropy-u/edx-platform,MakeHer/edx-platform,xuxiao19910803/edx,ferabra/edx-platform,nikolas/edx-platform,yokose-ks/edx-platform,ahmedaljazzar/edx-platform,simbs/edx-platform,nanolearning/edx-platform,deepsrijit1105/edx-platform,halvertoluke/edx-platform,abdoosh00/edraak,chauhanhardik/populo,cognitiveclass/edx-platform,motion2015/a3,OmarIthawi/edx-platform,bitifirefly/edx-platform,sameetb-cuelogic/edx-platform-test,arbrandes/edx-platform,msegado/edx-platform,SravanthiSinha/edx-platform,philanthropy-u/edx-platform,ak2703/edx-platform,y12uc231/edx-platform,don-github/edx-platform,chauhanhardik/populo,vikas1885/test1,kxliugang/edx-platform,MSOpenTech/
edx-platform,abdoosh00/edx-rtl-final,rismalrv/edx-platform,nttks/jenkins-test,doismellburning/edx-platform,hastexo/edx-platform,vasyarv/edx-platform,andyzsf/edx,shabab12/edx-platform,olexiim/edx-platform,gsehub/edx-platform,Shrhawk/edx-platform,chauhanhardik/populo_2,arbrandes/edx-platform,proversity-org/edx-platform,Unow/edx-platform,doganov/edx-platform,Stanford-Online/edx-platform,xuxiao19910803/edx-platform,4eek/edx-platform,waheedahmed/edx-platform,Lektorium-LLC/edx-platform,chrisndodge/edx-platform,solashirai/edx-platform,jswope00/GAI,rhndg/openedx,alexthered/kienhoc-platform,pelikanchik/edx-platform,jbassen/edx-platform,carsongee/edx-platform,jonathan-beard/edx-platform,jazkarta/edx-platform,nttks/jenkins-test,shashank971/edx-platform,bitifirefly/edx-platform,utecuy/edx-platform,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform-for-isc,inares/edx-platform,Unow/edx-platform,andyzsf/edx,CredoReference/edx-platform,chauhanhardik/populo_2,dkarakats/edx-platform,eduNEXT/edunext-platform,kmoocdev/edx-platform,xinjiguaike/edx-platform,deepsrijit1105/edx-platform,rhndg/openedx,devs1991/test_edx_docmode,franosincic/edx-platform,JioEducation/edx-platform,nanolearningllc/edx-platform-cypress,inares/edx-platform,longmen21/edx-platform,eestay/edx-platform,devs1991/test_edx_docmode,beni55/edx-platform,jswope00/griffinx,cselis86/edx-platform,TeachAtTUM/edx-platform,raccoongang/edx-platform,iivic/BoiseStateX,ak2703/edx-platform,unicri/edx-platform,peterm-itr/edx-platform,Edraak/circleci-edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,dsajkl/123,Lektorium-LLC/edx-platform,romain-li/edx-platform,doismellburning/edx-platform,ampax/edx-platform-backup,ESOedX/edx-platform,zadgroup/edx-platform,pomegranited/edx-platform,BehavioralInsightsTeam/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo,jamesblunt/edx-platform | from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return "ERROR: This module is unknown--students will not see it at all"
else:
return ""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
hidden-module-unicode: Return unicode string to pass assertion
XBlock Fragments expect unicode strings, and fail on an assertion when
it isn't:
```
2013-11-14 07:55:50,774 ERROR 3788 [django.request] base.py:215 - Internal Server Error: /courses/TestU/TST101/now/courseware/41d55c576a574fde99319420228f7f88/5fef5794e34842f4a2d45ebcdeaa9a3a/
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 20, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/utils/decorators.py", line 91, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 75, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/edx/app/edxapp/edx-platform/lms/djangoapps/courseware/views.py", line 407, in index
context['fragment'] = section_module.render('student_view')
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/seq_module.py", line 77, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/vertical_module.py", line 27, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 464, in student_view
return Fragment(self.get_html())
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 34, in __init__
self.add_content(content)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 71, in add_content
assert isinstance(content, unicode)
AssertionError
``` | from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
| <commit_before>from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return "ERROR: This module is unknown--students will not see it at all"
else:
return ""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
<commit_msg>hidden-module-unicode: Return unicode string to pass assertion
XBlock Fragments expect unicode strings, and fail on an assertion when
it isn't:
```
2013-11-14 07:55:50,774 ERROR 3788 [django.request] base.py:215 - Internal Server Error: /courses/TestU/TST101/now/courseware/41d55c576a574fde99319420228f7f88/5fef5794e34842f4a2d45ebcdeaa9a3a/
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 20, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/utils/decorators.py", line 91, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 75, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/edx/app/edxapp/edx-platform/lms/djangoapps/courseware/views.py", line 407, in index
context['fragment'] = section_module.render('student_view')
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/seq_module.py", line 77, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/vertical_module.py", line 27, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 464, in student_view
return Fragment(self.get_html())
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 34, in __init__
self.add_content(content)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 71, in add_content
assert isinstance(content, unicode)
AssertionError
```<commit_after> | from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
| from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return "ERROR: This module is unknown--students will not see it at all"
else:
return ""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
hidden-module-unicode: Return unicode string to pass assertion
XBlock Fragments expect unicode strings, and fail on an assertion when
it isn't:
```
2013-11-14 07:55:50,774 ERROR 3788 [django.request] base.py:215 - Internal Server Error: /courses/TestU/TST101/now/courseware/41d55c576a574fde99319420228f7f88/5fef5794e34842f4a2d45ebcdeaa9a3a/
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 20, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/utils/decorators.py", line 91, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 75, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/edx/app/edxapp/edx-platform/lms/djangoapps/courseware/views.py", line 407, in index
context['fragment'] = section_module.render('student_view')
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/seq_module.py", line 77, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/vertical_module.py", line 27, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 464, in student_view
return Fragment(self.get_html())
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 34, in __init__
self.add_content(content)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 71, in add_content
assert isinstance(content, unicode)
AssertionError
```from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
| <commit_before>from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return "ERROR: This module is unknown--students will not see it at all"
else:
return ""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
<commit_msg>hidden-module-unicode: Return unicode string to pass assertion
XBlock Fragments expect unicode strings, and fail on an assertion when
it isn't:
```
2013-11-14 07:55:50,774 ERROR 3788 [django.request] base.py:215 - Internal Server Error: /courses/TestU/TST101/now/courseware/41d55c576a574fde99319420228f7f88/5fef5794e34842f4a2d45ebcdeaa9a3a/
Traceback (most recent call last):
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/core/handlers/base.py", line 111, in get_response
response = callback(request, *callback_args, **callback_kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/contrib/auth/decorators.py", line 20, in _wrapped_view
return view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/utils/decorators.py", line 91, in _wrapped_view
response = view_func(request, *args, **kwargs)
File "/edx/app/edxapp/venvs/edxapp/local/lib/python2.7/site-packages/django/views/decorators/cache.py", line 75, in _cache_controlled
response = viewfunc(request, *args, **kw)
File "/edx/app/edxapp/edx-platform/lms/djangoapps/courseware/views.py", line 407, in index
context['fragment'] = section_module.render('student_view')
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/seq_module.py", line 77, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/vertical_module.py", line 27, in student_view
rendered_child = child.render('student_view', context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/core.py", line 156, in render
return self.runtime.render(self, view, context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 903, in render
return block.xmodule_runtime.render(to_render, view_name, context)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/runtime.py", line 356, in render
frag = view_fn(context)
File "/edx/app/edxapp/edx-platform/common/lib/xmodule/xmodule/x_module.py", line 464, in student_view
return Fragment(self.get_html())
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 34, in __init__
self.add_content(content)
File "/edx/app/edxapp/venvs/edxapp/src/xblock/xblock/fragment.py", line 71, in add_content
assert isinstance(content, unicode)
AssertionError
```<commit_after>from xmodule.x_module import XModule
from xmodule.raw_module import RawDescriptor
class HiddenModule(XModule):
def get_html(self):
if self.system.user_is_staff:
return u"ERROR: This module is unknown--students will not see it at all"
else:
return u""
class HiddenDescriptor(RawDescriptor):
module_class = HiddenModule
|
578fe6f7403de0f93b3ca2776092e5dfe8dbfa73 | twisted/plugins/docker_xylem_plugin.py | twisted/plugins/docker_xylem_plugin.py | import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
| import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
| Create /run/docker/plugins before using it. (@bearnard) | Create /run/docker/plugins before using it. (@bearnard)
| Python | mit | praekeltfoundation/docker-xylem,praekeltfoundation/docker-xylem | import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
Create /run/docker/plugins before using it. (@bearnard) | import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
| <commit_before>import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
<commit_msg>Create /run/docker/plugins before using it. (@bearnard)<commit_after> | import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
| import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
Create /run/docker/plugins before using it. (@bearnard)import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
| <commit_before>import yaml
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.UNIXServer(config.get(
'socket', "/run/docker/plugins/xylem.sock"),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
<commit_msg>Create /run/docker/plugins before using it. (@bearnard)<commit_after>import yaml
from zope.interface import implements
from twisted.python import filepath, usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from docker_xylem import service
class Options(usage.Options):
optParameters = [
["config", "c", "xylem-plugin.yml", "Config file"],
]
class DockerServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "docker_xylem"
description = "A docker plugin service for xylem"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
sockfp = filepath.FilePath("/run/docker/plugins/xylem.sock")
if not sockfp.parent().exists():
sockfp.parent().makedirs()
return internet.UNIXServer(
config.get('socket', sockfp.path),
server.Site(service.DockerService(config)))
serviceMaker = DockerServiceMaker()
|
45ead09898275154919ab9589abb610d42049782 | website/apps/ts_om/views/ScenarioValidationView.py | website/apps/ts_om/views/ScenarioValidationView.py | import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
@staticmethod
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
| import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
| Remove invalid staticmethod decorator from post method. | Fix: Remove invalid staticmethod decorator from post method.
| Python | mpl-2.0 | vecnet/om,vecnet/om,vecnet/om,vecnet/om,vecnet/om | import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
@staticmethod
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
Fix: Remove invalid staticmethod decorator from post method. | import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
| <commit_before>import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
@staticmethod
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
<commit_msg>Fix: Remove invalid staticmethod decorator from post method.<commit_after> | import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
| import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
@staticmethod
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
Fix: Remove invalid staticmethod decorator from post method.import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
| <commit_before>import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
@staticmethod
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
<commit_msg>Fix: Remove invalid staticmethod decorator from post method.<commit_after>import requests
from django.conf import settings
from django.http import HttpResponse
from django.views.generic.base import View
from website.apps.ts_om.check import check_url
def rest_validate(f):
validate_url = check_url(getattr(settings, "TS_OM_VALIDATE_URL", None), "validate")
response = requests.post(validate_url, data=f)
return response.text
class ScenarioValidationView(View):
def post(self, request):
json_str = rest_validate(request.read())
return HttpResponse(json_str, content_type="application/json")
|
861f2ea86e26fe27ba3f2f283c32ed0a9931c5fb | ce/analysis/core.py | ce/analysis/core.py | #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
| #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
if __name__ == '__main__':
e = '((a + 2) * (a + 3))'
ErrorAnalysis(e, {'a': cast_error('0.1', '0.2')})
| Add preliminary ErrorAnalysis test case | Add preliminary ErrorAnalysis test case
| Python | mit | admk/soap | #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
Add preliminary ErrorAnalysis test case | #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
if __name__ == '__main__':
e = '((a + 2) * (a + 3))'
ErrorAnalysis(e, {'a': cast_error('0.1', '0.2')})
| <commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
<commit_msg>Add preliminary ErrorAnalysis test case<commit_after> | #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
if __name__ == '__main__':
e = '((a + 2) * (a + 3))'
ErrorAnalysis(e, {'a': cast_error('0.1', '0.2')})
| #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
Add preliminary ErrorAnalysis test case#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
if __name__ == '__main__':
e = '((a + 2) * (a + 3))'
ErrorAnalysis(e, {'a': cast_error('0.1', '0.2')})
| <commit_before>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
<commit_msg>Add preliminary ErrorAnalysis test case<commit_after>#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..common import DynamicMethods
from ..expr import Expr, ExprTreeTransformer
from ..semantics import cast_error
class Analysis(DynamicMethods):
def __init__(self, e, **kwargs):
super(Analysis, self).__init__()
self.e = e
self.s = ExprTreeTransformer(Expr(e), **kwargs).closure()
def analyse(self):
return [(self._analyse(t), t) for t in self.s]
def _analyse(self, t):
l = self.list_methods(lambda m: m.endswith('analysis'))
return (f(t) for f in l)
class ErrorAnalysis(Analysis):
def __init__(self, e, v, **kwargs):
super(ErrorAnalysis, self).__init__(e, **kwargs)
self.v = v
def error_analysis(self, t):
return t.error(self.v)
if __name__ == '__main__':
e = '((a + 2) * (a + 3))'
ErrorAnalysis(e, {'a': cast_error('0.1', '0.2')})
|
80d02cffe20cfa6d370bd33856db480bf0ce5fea | backend/unichat/helpers.py | backend/unichat/helpers.py | from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
| from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
| Create random auth token on user signup | Create random auth token on user signup
| Python | mit | dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet | from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
Create random auth token on user signup | from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
| <commit_before>from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
<commit_msg>Create random auth token on user signup<commit_after> | from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
| from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
Create random auth token on user signupfrom .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
| <commit_before>from .models import School, User
import re
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
user_obj = User.objects.create_user(email=email, school=school, password=password)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
# TODO: Send signup mail to user
<commit_msg>Create random auth token on user signup<commit_after>from .models import School, User
import re
import string
import random
def get_school_list():
schools = School.objects.all()
school_list = []
for s in schools:
school = {}
school['id'] = s.id
school['name'] = s.name
school['site'] = s.site
school['university'] = s.university.name
school['city'] = s.university.city.name
school['country'] = s.university.city.country.name
school_list.append(school)
return school_list
def get_school_by_email(email):
for school in School.objects.all():
if re.match(school.mailRegex, email):
return school
return None
def create_user(email, school):
password = User.objects.make_random_password()
token = ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(40))
user_obj = User.objects.create_user(email=email, school=school, password=password, token=token)
user_obj.interestedInSchools.set(School.objects.all().values_list('id', flat=True))
user_obj.save()
print 'Email:', email, 'Password:', password, 'Token:', token
# TODO: Send signup mail to user
|
c4ea358c7cdcd76dc9bf35b54e22cc9be05dc62a | setuptools/command/sdist3.py | setuptools/command/sdist3.py | from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([f for f in files if f.endswith(".py")], write=True)
| import os
from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([os.path.join(base_dir, f) for f in files if f.endswith(".py")], write=True)
| Correct path names for fixed files. | Correct path names for fixed files.
--HG--
branch : distribute
extra : rebase_source : d5bd9c6fdb8cf2208eca9e0e5f37cb7fea1d14a5
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([f for f in files if f.endswith(".py")], write=True)
Correct path names for fixed files.
--HG--
branch : distribute
extra : rebase_source : d5bd9c6fdb8cf2208eca9e0e5f37cb7fea1d14a5 | import os
from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([os.path.join(base_dir, f) for f in files if f.endswith(".py")], write=True)
| <commit_before>from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([f for f in files if f.endswith(".py")], write=True)
<commit_msg>Correct path names for fixed files.
--HG--
branch : distribute
extra : rebase_source : d5bd9c6fdb8cf2208eca9e0e5f37cb7fea1d14a5<commit_after> | import os
from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([os.path.join(base_dir, f) for f in files if f.endswith(".py")], write=True)
| from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([f for f in files if f.endswith(".py")], write=True)
Correct path names for fixed files.
--HG--
branch : distribute
extra : rebase_source : d5bd9c6fdb8cf2208eca9e0e5f37cb7fea1d14a5import os
from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([os.path.join(base_dir, f) for f in files if f.endswith(".py")], write=True)
| <commit_before>from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([f for f in files if f.endswith(".py")], write=True)
<commit_msg>Correct path names for fixed files.
--HG--
branch : distribute
extra : rebase_source : d5bd9c6fdb8cf2208eca9e0e5f37cb7fea1d14a5<commit_after>import os
from distutils import log
from sdist import sdist
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class _RefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
class sdist3(sdist):
description = "sdist version that runs 2to3 on all sources before packaging"
fixer_names = None
def copy_file(self, file, dest, link=None):
# We ignore the link parameter, always demanding a copy, so that
# 2to3 won't overwrite the original file.
sdist.copy_file(self, file, dest)
def make_release_tree(self, base_dir, files):
sdist.make_release_tree(self, base_dir, files)
# run 2to3 on all files
fixer_names = self.fixer_names
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = _RefactoringTool(fixer_names)
r.refactor([os.path.join(base_dir, f) for f in files if f.endswith(".py")], write=True)
|
e4345634ea6a4c43db20ea1d3d33134b6ee6204d | alembic/versions/151b2f642877_text_to_json.py | alembic/versions/151b2f642877_text_to_json.py | """text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
| """text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
| Fix alembic revision after merge master | Fix alembic revision after merge master
| Python | agpl-3.0 | OpenNewsLabs/pybossa,PyBossa/pybossa,PyBossa/pybossa,Scifabric/pybossa,jean/pybossa,geotagx/pybossa,OpenNewsLabs/pybossa,jean/pybossa,Scifabric/pybossa,geotagx/pybossa | """text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
Fix alembic revision after merge master | """text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
| <commit_before>"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
<commit_msg>Fix alembic revision after merge master<commit_after> | """text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
| """text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
Fix alembic revision after merge master"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
| <commit_before>"""text to JSON
Revision ID: 151b2f642877
Revises: aee7291c81
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'aee7291c81'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
<commit_msg>Fix alembic revision after merge master<commit_after>"""text to JSON
Revision ID: 151b2f642877
Revises: ac115763654
Create Date: 2015-06-12 14:40:56.956657
"""
# revision identifiers, used by Alembic.
revision = '151b2f642877'
down_revision = 'ac115763654'
from alembic import op
import sqlalchemy as sa
def upgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE JSON USING info::JSON;'
op.execute(query)
def downgrade():
query = 'ALTER TABLE project ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE "user" ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
query = 'ALTER TABLE task_run ALTER COLUMN info TYPE TEXT USING info::TEXT;'
op.execute(query)
|
780ba3b8f1271dd0a4b78c7a3bb00f5f4fabf2eb | external_ip.py | external_ip.py | # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by boucha.saltstack.com
'''
# Import Python Libs
import logging
import requests
from pprint import pprint
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = 'http://boucha.saltstack.com:8080'
if __opts__.get('request_external_ip', False):
try:
r = requests.get(check_url, timeout=0.1)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
| # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by an IP reporting service.
SaltStack provides this at http://boucha.saltstack.com:8080
The following config item is mandatory to opt in
external_ip.server: 'http://boucha.saltstack.com:8080'
The following minion config items are optional:
external_ip.timeout: 0.5
'''
# Import Python Libs
import logging
import requests
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = __opts__.get('external_ip.server', False)
if check_url:
try:
timeout = __opts__.get('external_ip.timeout', 0.5)
r = requests.get(check_url, timeout=timeout)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
| Add requirement for opt in | Add requirement for opt in
| Python | apache-2.0 | UtahDave/external_ip | # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by boucha.saltstack.com
'''
# Import Python Libs
import logging
import requests
from pprint import pprint
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = 'http://boucha.saltstack.com:8080'
if __opts__.get('request_external_ip', False):
try:
r = requests.get(check_url, timeout=0.1)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
Add requirement for opt in | # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by an IP reporting service.
SaltStack provides this at http://boucha.saltstack.com:8080
The following config item is mandatory to opt in
external_ip.server: 'http://boucha.saltstack.com:8080'
The following minion config items are optional:
external_ip.timeout: 0.5
'''
# Import Python Libs
import logging
import requests
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = __opts__.get('external_ip.server', False)
if check_url:
try:
timeout = __opts__.get('external_ip.timeout', 0.5)
r = requests.get(check_url, timeout=timeout)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
| <commit_before># -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by boucha.saltstack.com
'''
# Import Python Libs
import logging
import requests
from pprint import pprint
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = 'http://boucha.saltstack.com:8080'
if __opts__.get('request_external_ip', False):
try:
r = requests.get(check_url, timeout=0.1)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
<commit_msg>Add requirement for opt in<commit_after> | # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by an IP reporting service.
SaltStack provides this at http://boucha.saltstack.com:8080
The following config item is mandatory to opt in
external_ip.server: 'http://boucha.saltstack.com:8080'
The following minion config items are optional:
external_ip.timeout: 0.5
'''
# Import Python Libs
import logging
import requests
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = __opts__.get('external_ip.server', False)
if check_url:
try:
timeout = __opts__.get('external_ip.timeout', 0.5)
r = requests.get(check_url, timeout=timeout)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
| # -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by boucha.saltstack.com
'''
# Import Python Libs
import logging
import requests
from pprint import pprint
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = 'http://boucha.saltstack.com:8080'
if __opts__.get('request_external_ip', False):
try:
r = requests.get(check_url, timeout=0.1)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
Add requirement for opt in# -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by an IP reporting service.
SaltStack provides this at http://boucha.saltstack.com:8080
The following config item is mandatory to opt in
external_ip.server: 'http://boucha.saltstack.com:8080'
The following minion config items are optional:
external_ip.timeout: 0.5
'''
# Import Python Libs
import logging
import requests
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = __opts__.get('external_ip.server', False)
if check_url:
try:
timeout = __opts__.get('external_ip.timeout', 0.5)
r = requests.get(check_url, timeout=timeout)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
| <commit_before># -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by boucha.saltstack.com
'''
# Import Python Libs
import logging
import requests
from pprint import pprint
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = 'http://boucha.saltstack.com:8080'
if __opts__.get('request_external_ip', False):
try:
r = requests.get(check_url, timeout=0.1)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
<commit_msg>Add requirement for opt in<commit_after># -*- coding: utf-8 -*-
'''
:codeauthor: David Boucha
:copyright: © 2013 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
salt.grains.external_ip
~~~~~~~~~~~~~~~~~~~~~~~
Return the external IP address reported by an IP reporting service.
SaltStack provides this at http://boucha.saltstack.com:8080
The following config item is mandatory to opt in
external_ip.server: 'http://boucha.saltstack.com:8080'
The following minion config items are optional:
external_ip.timeout: 0.5
'''
# Import Python Libs
import logging
import requests
# Import salt libs
from salt.utils.validate.net import ipv4_addr as _ipv4_addr
# Import errors
from requests.exceptions import Timeout, ConnectionError, HTTPError
log = logging.getLogger(__name__)
def external_ip():
'''
Return the external IP address
'''
check_url = __opts__.get('external_ip.server', False)
if check_url:
try:
timeout = __opts__.get('external_ip.timeout', 0.5)
r = requests.get(check_url, timeout=timeout)
ip_addr = r.json()
return {'external_ip': ip_addr['ip_addr']}
except Timeout as exc:
log.debug('Timeout exceeded: {0}'.format(exc))
except (ConnectionError, HTTPError) as exc:
log.debug('Connection error: {0}'.format(exc))
return {'external_ip': None}
|
7e47ce789c9eb363f941ead2b3b199152dffff1c | pushmanager/tests/test_bookmarklet.py | pushmanager/tests/test_bookmarklet.py | import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
| import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
T.assert_in("%TICKET%", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
| Test added for the ticket tracker url. | Test added for the ticket tracker url.
The automatic test now checks also the ticket tracker url (just tests that
the javascript contains the word %TICKET%).
| Python | apache-2.0 | Yelp/pushmanager,YelpArchive/pushmanager,Yelp/pushmanager,Yelp/pushmanager,YelpArchive/pushmanager,YelpArchive/pushmanager,YelpArchive/pushmanager,Yelp/pushmanager | import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
Test added for the ticket tracker url.
The automatic test now checks also the ticket tracker url (just tests that
the javascript contains the word %TICKET%). | import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
T.assert_in("%TICKET%", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
| <commit_before>import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
<commit_msg>Test added for the ticket tracker url.
The automatic test now checks also the ticket tracker url (just tests that
the javascript contains the word %TICKET%).<commit_after> | import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
T.assert_in("%TICKET%", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
| import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
Test added for the ticket tracker url.
The automatic test now checks also the ticket tracker url (just tests that
the javascript contains the word %TICKET%).import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
    """Smoke tests for the bookmarklet-serving request handlers."""

    def get_handlers(self):
        """Route each bookmarklet handler at its own advertised URL."""
        handlers = (CreateRequestBookmarkletHandler, CheckSitesBookmarkletHandler)
        return [(h.url, h) for h in handlers]

    @contextlib.contextmanager
    def page(self, handler):
        """Fetch *handler*'s URL while posing as "testuser" and yield the response.

        get_current_user is patched out for the duration of the block so the
        request is served as an authenticated user without a real login.
        """
        patched_auth = mock.patch.object(handler, "get_current_user")
        with patched_auth:
            handler.get_current_user.return_value = "testuser"
            yield self.fetch(str(handler.url))

    def test_create_request_bookmarklet(self):
        """The create-request bookmarklet page serves the expected javascript."""
        with self.page(CreateRequestBookmarkletHandler) as response:
            # The body is a javascript blob; spot-check a few variable
            # names/strings that we know must appear in the script.
            T.assert_equal(response.error, None)
            for needle in ("ticketNumberToURL", "codeReview", "%TICKET%"):
                T.assert_in(needle, response.body)

    def test_check_sites_bookmarklet(self):
        """The check-sites bookmarklet page serves the expected javascript."""
        # Same spot-check approach as test_create_request_bookmarklet.
        with self.page(CheckSitesBookmarkletHandler) as resp:
            T.assert_equal(resp.error, None)
            T.assert_in("window.open", resp.body)
| <commit_before>import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
<commit_msg>Test added for the ticket tracker url.
The automatic test now checks also the ticket tracker url (just tests that
the javascript contains the word %TICKET%).<commit_after>import contextlib
import mock
import testify as T
from pushmanager.handlers import CheckSitesBookmarkletHandler
from pushmanager.handlers import CreateRequestBookmarkletHandler
from pushmanager.testing.testservlet import AsyncTestCase
class BookmarkletTest(T.TestCase, AsyncTestCase):
def get_handlers(self):
return [
(CreateRequestBookmarkletHandler.url, CreateRequestBookmarkletHandler),
(CheckSitesBookmarkletHandler.url, CheckSitesBookmarkletHandler),
]
@contextlib.contextmanager
def page(self, handler):
with mock.patch.object(handler, "get_current_user"):
handler.get_current_user.return_value = "testuser"
response = self.fetch(str(handler.url))
yield response
def test_create_request_bookmarklet(self):
with self.page(CreateRequestBookmarkletHandler) as response:
# We'll get a javascript as the body, just check some
# variable names/strings that we know is there in the
# script.
T.assert_equal(response.error, None)
T.assert_in("ticketNumberToURL", response.body)
T.assert_in("codeReview", response.body)
T.assert_in("%TICKET%", response.body)
def test_check_sites_bookmarklet(self):
with self.page(CheckSitesBookmarkletHandler) as response:
# See comment above in test_create_request_bookmarklet
T.assert_equal(response.error, None)
T.assert_in("window.open", response.body)
|
e5bec10f3788e435af63970158e35cb3f2ad4f2a | simple_faq/views.py | simple_faq/views.py | from django.views.generic import TemplateView
from simple_faq.models import Topic
class Topics(TemplateView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic | from django.views.generic import ListView
from simple_faq.models import Topic
class Topics(ListView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic | Fix father class of topics view | Fix father class of topics view
@15m
| Python | mit | devartis/django-simple-faq,devartis/django-simple-faq | from django.views.generic import TemplateView
from simple_faq.models import Topic
class Topics(TemplateView):
template_name = "topics.html"
context_object_name = "topics"
model = TopicFix father class of topics view
@15m | from django.views.generic import ListView
from simple_faq.models import Topic
class Topics(ListView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic | <commit_before>from django.views.generic import TemplateView
from simple_faq.models import Topic
class Topics(TemplateView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic<commit_msg>Fix father class of topics view
@15m<commit_after> | from django.views.generic import ListView
from simple_faq.models import Topic
class Topics(ListView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic | from django.views.generic import TemplateView
from simple_faq.models import Topic
class Topics(TemplateView):
template_name = "topics.html"
context_object_name = "topics"
model = TopicFix father class of topics view
@15mfrom django.views.generic import ListView
from simple_faq.models import Topic
class Topics(ListView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic | <commit_before>from django.views.generic import TemplateView
from simple_faq.models import Topic
class Topics(TemplateView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic<commit_msg>Fix father class of topics view
@15m<commit_after>from django.views.generic import ListView
from simple_faq.models import Topic
class Topics(ListView):
template_name = "topics.html"
context_object_name = "topics"
model = Topic |
3fcdb9e64ef955fd0a7e5b2fda481d351dfb4d18 | spotify/__init__.py | spotify/__init__.py | from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
| from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
| Add NullHandler to the 'spotify' logger | Add NullHandler to the 'spotify' logger
| Python | apache-2.0 | jodal/pyspotify,mopidy/pyspotify,kotamat/pyspotify,jodal/pyspotify,kotamat/pyspotify,felix1m/pyspotify,mopidy/pyspotify,kotamat/pyspotify,felix1m/pyspotify,felix1m/pyspotify,jodal/pyspotify | from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
Add NullHandler to the 'spotify' logger | from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
| <commit_before>from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
<commit_msg>Add NullHandler to the 'spotify' logger<commit_after> | from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
| from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
Add NullHandler to the 'spotify' loggerfrom __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
| <commit_before>from __future__ import unicode_literals
import os
import weakref
import cffi
__version__ = '2.0.0a1'
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
<commit_msg>Add NullHandler to the 'spotify' logger<commit_after>from __future__ import unicode_literals
import logging
import os
import weakref
import cffi
__version__ = '2.0.0a1'
# Log to nowhere by default. For details, see:
# http://docs.python.org/2/howto/logging.html#library-config
logging.getLogger('spotify').addHandler(logging.NullHandler())
_header_file = os.path.join(os.path.dirname(__file__), 'api.processed.h')
_header = open(_header_file).read()
_header += '#define SPOTIFY_API_VERSION ...\n'
ffi = cffi.FFI()
ffi.cdef(_header)
lib = ffi.verify('#include "libspotify/api.h"', libraries=[str('spotify')])
# Mapping between keys and objects that should be kept alive as long as the key
# is alive. May be used to keep objects alive when there isn't a more
# convenient place to keep a reference to it. The keys are weakrefs, so entries
# disappear from the dict when the key is garbage collected, potentially
# causing objects associated to the key to be garbage collected as well. For
# further details, refer to the CFFI docs.
global_weakrefs = weakref.WeakKeyDictionary()
from spotify.error import * # noqa
|
65655186f22f1cdc4699cfbddb8b11ccf3ad292a | apps/challenge/forms.py | apps/challenge/forms.py | from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
| from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def search(self):
sqs = super(ChallengeSearchForm, self).search()
sqs = sqs.order_by('-total_members','-type','score')
return sqs
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
| Improve search ordering in challenges (+packages) | Improve search ordering in challenges (+packages)
| Python | bsd-3-clause | mfitzp/smrtr,mfitzp/smrtr | from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
Improve search ordering in challenges (+packages) | from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def search(self):
sqs = super(ChallengeSearchForm, self).search()
sqs = sqs.order_by('-total_members','-type','score')
return sqs
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
| <commit_before>from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
<commit_msg>Improve search ordering in challenges (+packages)<commit_after> | from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def search(self):
sqs = super(ChallengeSearchForm, self).search()
sqs = sqs.order_by('-total_members','-type','score')
return sqs
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
| from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
Improve search ordering in challenges (+packages)from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def search(self):
sqs = super(ChallengeSearchForm, self).search()
sqs = sqs.order_by('-total_members','-type','score')
return sqs
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
| <commit_before>from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
<commit_msg>Improve search ordering in challenges (+packages)<commit_after>from django import forms
from django.contrib.admin import widgets
# Smrtr
from network.models import Network
from challenge.models import Challenge
# External
from haystack.forms import SearchForm
class ChallengeForm(forms.ModelForm):
class Meta:
model = Challenge
fields = ['name', 'description','image']
def __init__(self, request, *args, **kwargs):
super(ChallengeForm, self).__init__(*args, **kwargs)
#if request: # If passed only show networks the user is on
#self.fields['network'].queryset = Network.objects.filter(usernetwork__user=request.user)
class ChallengeSearchForm(SearchForm):
def search(self):
sqs = super(ChallengeSearchForm, self).search()
sqs = sqs.order_by('-total_members','-type','score')
return sqs
def __init__(self, *args, **kwargs):
super(ChallengeSearchForm, self).__init__(*args, **kwargs)
|
95bd5b96fcbc62133aac4045a77ed7b7e7d342a5 | pyhprof/__init__.py | pyhprof/__init__.py | from pyhprof import *
from pyhprof import __doc__
from pyhprof import __all__
| '''Library for parsing and analyzing Java hprof files
'''
from .parsers import HProfParser, HeapDumpParser | Include parsers in top level module | Include parsers in top level module
| Python | apache-2.0 | matthagy/pyhprof | from pyhprof import *
from pyhprof import __doc__
from pyhprof import __all__
Include parsers in top level module | '''Library for parsing and analyzing Java hprof files
'''
from .parsers import HProfParser, HeapDumpParser | <commit_before>from pyhprof import *
from pyhprof import __doc__
from pyhprof import __all__
<commit_msg>Include parsers in top level module<commit_after> | '''Library for parsing and analyzing Java hprof files
'''
from .parsers import HProfParser, HeapDumpParser | from pyhprof import *
from pyhprof import __doc__
from pyhprof import __all__
Include parsers in top level module'''Library for parsing and analyzing Java hprof files
'''
from .parsers import HProfParser, HeapDumpParser | <commit_before>from pyhprof import *
from pyhprof import __doc__
from pyhprof import __all__
<commit_msg>Include parsers in top level module<commit_after>'''Library for parsing and analyzing Java hprof files
'''
from .parsers import HProfParser, HeapDumpParser |
761ba162338b86bc16aa4b642cc51a297d5491d6 | games/notifier.py | games/notifier.py | from games import models
def get_unpublished_installers(count=10):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=10):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=10):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=10):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
| from games import models
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
| Set the default number of items to 12 because it fits more nicely on the email | Set the default number of items to 12 because it fits more nicely on the email
| Python | agpl-3.0 | lutris/website,Turupawn/website,lutris/website,lutris/website,Turupawn/website,Turupawn/website,Turupawn/website,lutris/website | from games import models
def get_unpublished_installers(count=10):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=10):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=10):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=10):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
Set the default number of items to 12 because it fits more nicely on the email | from games import models
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
| <commit_before>from games import models
def get_unpublished_installers(count=10):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=10):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=10):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=10):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
<commit_msg>Set the default number of items to 12 because it fits more nicely on the email<commit_after> | from games import models
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
| from games import models
def get_unpublished_installers(count=10):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=10):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=10):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=10):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
Set the default number of items to 12 because it fits more nicely on the emailfrom games import models
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
| <commit_before>from games import models
def get_unpublished_installers(count=10):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=10):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=10):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=10):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
<commit_msg>Set the default number of items to 12 because it fits more nicely on the email<commit_after>from games import models
DEFAULT_COUNT = 12
def get_unpublished_installers(count=DEFAULT_COUNT):
return models.Installer.objects.filter(published=False).order_by('?')[:count]
def get_unpublished_screenshots(count=DEFAULT_COUNT):
return models.Screenshot.objects.filter(published=False).order_by('?')[:count]
def get_unreviewed_game_submissions(count=DEFAULT_COUNT):
return models.GameSubmission.objects.filter(accepted_at__isnull=True).order_by('?')[:count]
def get_installer_issues(count=DEFAULT_COUNT):
return models.InstallerIssue.objects.all().order_by('?')[:count]
def get_mod_mail_content():
return {
'installers': get_unpublished_installers(),
'screenshots': get_unpublished_screenshots(),
'submissions': get_unreviewed_game_submissions(),
'issues': get_installer_issues()
}
|
1fc452c7b0a8c55eee40e496de4750045ef725a4 | overspill/settings/local.py | overspill/settings/local.py | from __future__ import absolute_import
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
| from __future__ import absolute_import
import os
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.getenv('STATIC_ROOT')
MEDIA_ROOT = os.getenv('STATIC_ROOT')
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
| Set MEDIA_ROOT and STATIC_ROOT settings | Set MEDIA_ROOT and STATIC_ROOT settings
| Python | apache-2.0 | iamsteadman/overspill,iamsteadman/overspill | from __future__ import absolute_import
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
Set MEDIA_ROOT and STATIC_ROOT settings | from __future__ import absolute_import
import os
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.getenv('STATIC_ROOT')
MEDIA_ROOT = os.getenv('STATIC_ROOT')
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
| <commit_before>from __future__ import absolute_import
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
<commit_msg>Set MEDIA_ROOT and STATIC_ROOT settings<commit_after> | from __future__ import absolute_import
import os
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.getenv('STATIC_ROOT')
MEDIA_ROOT = os.getenv('STATIC_ROOT')
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
| from __future__ import absolute_import
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
Set MEDIA_ROOT and STATIC_ROOT settingsfrom __future__ import absolute_import
import os
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.getenv('STATIC_ROOT')
MEDIA_ROOT = os.getenv('STATIC_ROOT')
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
| <commit_before>from __future__ import absolute_import
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
<commit_msg>Set MEDIA_ROOT and STATIC_ROOT settings<commit_after>from __future__ import absolute_import
import os
from djcelery import setup_loader
from .base import *
DEBUG = True
ALLOWED_HOSTS = ['*']
STATIC_ROOT = os.getenv('STATIC_ROOT')
MEDIA_ROOT = os.getenv('STATIC_ROOT')
STATIC_URL = '/static/'
CELERY_REDIRECT_STDOUTS = False
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
setup_loader()
|
1d6ddc15fd6b195f3331e8c4022f0760da0fe9ef | cibopath/cli.py | cibopath/cli.py | # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
| # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
| Remove unwanted whitespace from doc str | Remove unwanted whitespace from doc str
| Python | bsd-3-clause | hackebrot/cibopath | # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
Remove unwanted whitespace from doc str | # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
| <commit_before># -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
<commit_msg>Remove unwanted whitespace from doc str<commit_after> | # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
| # -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
Remove unwanted whitespace from doc str# -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
| <commit_before># -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
<commit_msg>Remove unwanted whitespace from doc str<commit_after># -*- coding: utf-8 -*-
import click
from cibopath import __version__
@click.group()
@click.version_option(__version__, u'-V', u'--version', prog_name='Cibopath')
def cli():
"""Cibopath - Search Cookiecutters on GitHub."""
@cli.command('update')
def update_cmd():
print('update')
main = cli
|
0825919e14ae34abbafaf982533c6936a9958bc4 | shopify_python/__init__.py | shopify_python/__init__.py | # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
| # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.1'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
| Increase version number to 0.4.1 | Increase version number to 0.4.1
| Python | mit | Shopify/shopify_python | # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
Increase version number to 0.4.1 | # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.1'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
| <commit_before># Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
<commit_msg>Increase version number to 0.4.1<commit_after> | # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.1'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
| # Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
Increase version number to 0.4.1# Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.1'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
| <commit_before># Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.0'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
<commit_msg>Increase version number to 0.4.1<commit_after># Copyright (c) 2017 "Shopify inc." All rights reserved.
# Use of this source code is governed by a MIT-style license that can be found in the LICENSE file.
from __future__ import unicode_literals
from pylint import lint
from shopify_python import google_styleguide
from shopify_python import shopify_styleguide
__version__ = '0.4.1'
def register(linter): # type: (lint.PyLinter) -> None
google_styleguide.register_checkers(linter)
shopify_styleguide.register_checkers(linter)
|
efb14d75d04d0200b37af1c4c3bb50b61f0b2a7e | km3pipe/__init__.py | km3pipe/__init__.py | # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from .io import GenericPump # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| FIX also drop genericpump/h5pump from init | FIX also drop genericpump/h5pump from init
| Python | mit | tamasgal/km3pipe,tamasgal/km3pipe | # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from .io import GenericPump # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
FIX also drop genericpump/h5pump from init | # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| <commit_before># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from .io import GenericPump # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
<commit_msg>FIX also drop genericpump/h5pump from init<commit_after> | # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| # coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from .io import GenericPump # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
FIX also drop genericpump/h5pump from init# coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
| <commit_before># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
from .io import GenericPump # noqa
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
<commit_msg>FIX also drop genericpump/h5pump from init<commit_after># coding=utf-8
# Filename: __init__.py
"""
The extemporary KM3NeT analysis framework.
"""
from __future__ import division, absolute_import, print_function
try:
__KM3PIPE_SETUP__
except NameError:
__KM3PIPE_SETUP__ = False
from .__version__ import version, version_info # noqa
if not __KM3PIPE_SETUP__:
from .core import (Pipeline, Module, Pump, Blob, Run, # noqa
Geometry, AanetGeometry)
__author__ = "Tamas Gal and Moritz Lotze"
__copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration."
__credits__ = ["Thomas Heid"]
__license__ = "MIT"
__version__ = version
__maintainer__ = "Tamas Gal and Moritz Lotze"
__email__ = "tgal@km3net.de"
__status__ = "Development"
|
46bcad1e20e57f66498e7a70b8f3be929115bde6 | incunafein/module/page/extensions/prepared_date.py | incunafein/module/page/extensions/prepared_date.py | from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
| from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| Return parent date if there isn't one on the current object | Return parent date if there isn't one on the current object
Look for a prepared date in the ancestors of the current object and use
that if it exists
| Python | bsd-2-clause | incuna/incuna-feincms,incuna/incuna-feincms,incuna/incuna-feincms | from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
Return parent date if there isn't one on the current object
Look for a prepared date in the ancestors of the current object and use
that if it exists | from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| <commit_before>from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
<commit_msg>Return parent date if there isn't one on the current object
Look for a prepared date in the ancestors of the current object and use
that if it exists<commit_after> | from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
Return parent date if there isn't one on the current object
Look for a prepared date in the ancestors of the current object and use
that if it existsfrom django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| <commit_before>from django.db import models
def get_prepared_date(cls):
return cls.prepared_date or cls.parent.prepared_date
def register(cls, admin_cls):
cls.add_to_class('prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
cls.add_to_class('get_prepared_date', get_prepared_date)
<commit_msg>Return parent date if there isn't one on the current object
Look for a prepared date in the ancestors of the current object and use
that if it exists<commit_after>from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
|
83bf2da38eb67abab9005495289eb97b58c3856a | ec2_instance_change_type.py | ec2_instance_change_type.py | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() | Change message that detects instance type | Change message that detects instance type
| Python | mit | thinhpham/aws-tools | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()Change message that detects instance type | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() | <commit_before>#!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()<commit_msg>Change message that detects instance type<commit_after> | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() | #!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()Change message that detects instance type#!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() | <commit_before>#!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli()<commit_msg>Change message that detects instance type<commit_after>#!/usr/bin/env python
import sys
import click
from aws_util import Ec2Util
@click.command()
@click.option('-p', '--profile', default='default', help='Profile name to use.')
@click.argument('id_or_tag', required=True)
@click.argument('new_instance_type', required=True)
def cli(profile, id_or_tag, new_instance_type):
ec2 = Ec2Util(profile)
instance = ec2.get_instance(id_or_tag)
if instance:
old_instance_state = instance.state['Name']
old_instance_type = instance.instance_type
print('Current instance type is %s' % old_instance_type)
if new_instance_type != instance.instance_type:
ec2.change_instance_type(id_or_tag, new_instance_type)
instance.reload()
print('Instance type changed to %s successfully' % instance.instance_type)
else:
print('Current instance type is the same as new type. No need to do anything.')
else:
print('Error. Cannot find instance')
if __name__ == '__main__':
cli() |
e867722e4369fca2df9a4e07218815c7611f35be | examples/timeflies/timeflies_tkinter.py | examples/timeflies/timeflies_tkinter.py | from tkinter import Tk, Label, Frame
from rx import from_
from rx import operators as _
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = _.map(lambda ev: (label, ev, i))
delayer = _.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = _.flat_mapi(handle_label)
mapper = _.map(lambda c: Label(frame, text=c))
from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
| from tkinter import Tk, Label, Frame
import rx
from rx import operators as ops
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = ops.map(lambda ev: (label, ev, i))
delayer = ops.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = ops.flat_mapi(handle_label)
mapper = ops.map(lambda c: Label(frame, text=c))
rx.from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
| Use ops instead of _ | Use ops instead of _
| Python | mit | ReactiveX/RxPY,ReactiveX/RxPY | from tkinter import Tk, Label, Frame
from rx import from_
from rx import operators as _
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = _.map(lambda ev: (label, ev, i))
delayer = _.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = _.flat_mapi(handle_label)
mapper = _.map(lambda c: Label(frame, text=c))
from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
Use ops instead of _ | from tkinter import Tk, Label, Frame
import rx
from rx import operators as ops
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = ops.map(lambda ev: (label, ev, i))
delayer = ops.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = ops.flat_mapi(handle_label)
mapper = ops.map(lambda c: Label(frame, text=c))
rx.from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
| <commit_before>from tkinter import Tk, Label, Frame
from rx import from_
from rx import operators as _
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = _.map(lambda ev: (label, ev, i))
delayer = _.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = _.flat_mapi(handle_label)
mapper = _.map(lambda c: Label(frame, text=c))
from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
<commit_msg>Use ops instead of _<commit_after> | from tkinter import Tk, Label, Frame
import rx
from rx import operators as ops
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = ops.map(lambda ev: (label, ev, i))
delayer = ops.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = ops.flat_mapi(handle_label)
mapper = ops.map(lambda c: Label(frame, text=c))
rx.from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
| from tkinter import Tk, Label, Frame
from rx import from_
from rx import operators as _
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = _.map(lambda ev: (label, ev, i))
delayer = _.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = _.flat_mapi(handle_label)
mapper = _.map(lambda c: Label(frame, text=c))
from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
Use ops instead of _from tkinter import Tk, Label, Frame
import rx
from rx import operators as ops
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = ops.map(lambda ev: (label, ev, i))
delayer = ops.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = ops.flat_mapi(handle_label)
mapper = ops.map(lambda c: Label(frame, text=c))
rx.from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
| <commit_before>from tkinter import Tk, Label, Frame
from rx import from_
from rx import operators as _
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = _.map(lambda ev: (label, ev, i))
delayer = _.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = _.flat_mapi(handle_label)
mapper = _.map(lambda c: Label(frame, text=c))
from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
<commit_msg>Use ops instead of _<commit_after>from tkinter import Tk, Label, Frame
import rx
from rx import operators as ops
from rx.subjects import Subject
from rx.concurrency import TkinterScheduler
def main():
root = Tk()
root.title("Rx for Python rocks")
scheduler = TkinterScheduler(root)
mousemove = Subject()
frame = Frame(root, width=600, height=600)
frame.bind("<Motion>", mousemove.on_next)
text = 'TIME FLIES LIKE AN ARROW'
def on_next(info):
label, ev, i = info
label.place(x=ev.x + i*12 + 15, y=ev.y)
def handle_label(label, i):
label.config(dict(borderwidth=0, padx=0, pady=0))
mapper = ops.map(lambda ev: (label, ev, i))
delayer = ops.delay(i*100)
return mousemove.pipe(
delayer,
mapper
)
labeler = ops.flat_mapi(handle_label)
mapper = ops.map(lambda c: Label(frame, text=c))
rx.from_(text).pipe(
mapper,
labeler
).subscribe_(on_next, on_error=print, scheduler=scheduler)
frame.pack()
root.mainloop()
if __name__ == '__main__':
main()
|
2181d63c279965e4e694cae508a236f51d66d49b | data_structures/bitorrent/server/announce/torrent.py | data_structures/bitorrent/server/announce/torrent.py | import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else []
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else []
| import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else 0
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else 0
@property
def binary_peers(self):
binary_peers = ''
for peer in self.peers:
ip = peer.split(':')[0]
port = peer.split(':')[1]
ip = struct.unpack("!I", socket.inet_aton(ip))[0]
binary_peers += struct.pack('!ih', ip, int(port))
return binary_peers
| Implement binary representation of peers | Implement binary representation of peers
| Python | apache-2.0 | vtemian/university_projects,vtemian/university_projects,vtemian/university_projects | import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else []
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else []
Implement binary representation of peers | import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else 0
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else 0
@property
def binary_peers(self):
binary_peers = ''
for peer in self.peers:
ip = peer.split(':')[0]
port = peer.split(':')[1]
ip = struct.unpack("!I", socket.inet_aton(ip))[0]
binary_peers += struct.pack('!ih', ip, int(port))
return binary_peers
| <commit_before>import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else []
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else []
<commit_msg>Implement binary representation of peers<commit_after> | import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else 0
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else 0
@property
def binary_peers(self):
binary_peers = ''
for peer in self.peers:
ip = peer.split(':')[0]
port = peer.split(':')[1]
ip = struct.unpack("!I", socket.inet_aton(ip))[0]
binary_peers += struct.pack('!ih', ip, int(port))
return binary_peers
| import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else []
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else []
Implement binary representation of peersimport struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else 0
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else 0
@property
def binary_peers(self):
binary_peers = ''
for peer in self.peers:
ip = peer.split(':')[0]
port = peer.split(':')[1]
ip = struct.unpack("!I", socket.inet_aton(ip))[0]
binary_peers += struct.pack('!ih', ip, int(port))
return binary_peers
| <commit_before>import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else []
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else []
<commit_msg>Implement binary representation of peers<commit_after>import struct
import socket
import time
from trackpy.vendors.redis import redis
class Torrent(object):
def __init__(self, info_hash):
self.info = redis.hgetall(info_hash)
self.info_hash = info_hash
def can_announce(self, peer_id):
timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0)
if not timestamp:
return True
now = int(time.time())
return False if now - timestamp > 5 * 60 else True
def set_announce(self, peer_id):
redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time()))
@property
def peers(self):
return redis.smembers('%s_peers' % self.info_hash)
@peers.setter
def peers(self, peer):
redis.sadd('%s_peers' % self.info_hash, peer)
@property
def seeders(self):
return self.info['seeders'] if 'seeders' in self.info else 0
@property
def leechers(self):
return self.info['leecher'] if 'leechers' in self.info else 0
@property
def binary_peers(self):
binary_peers = ''
for peer in self.peers:
ip = peer.split(':')[0]
port = peer.split(':')[1]
ip = struct.unpack("!I", socket.inet_aton(ip))[0]
binary_peers += struct.pack('!ih', ip, int(port))
return binary_peers
|
f52c1f6b91bf9d1748d4254b33634394ec07df3f | markups/common.py | markups/common.py | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| Update MathJax URL to 2.7.4 | Update MathJax URL to 2.7.4
| Python | bsd-3-clause | mitya57/pymarkups,retext-project/pymarkups | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Update MathJax URL to 2.7.4 | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| <commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Update MathJax URL to 2.7.4<commit_after> | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Update MathJax URL to 2.7.4# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| <commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Update MathJax URL to 2.7.4<commit_after># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.4/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
be958ec64ec82b2687376058c90ce2465bfbc84a | twitter_explorer/__init__.py | twitter_explorer/__init__.py | from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
| from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET', 'POST'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
| Index endpoint now accepts POST | Index endpoint now accepts POST
| Python | mit | ysenko/python-from-zero-to-hero,ysenko/python-from-zero-to-hero | from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
Index endpoint now accepts POST | from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET', 'POST'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
| <commit_before>from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
<commit_msg>Index endpoint now accepts POST<commit_after> | from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET', 'POST'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
| from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
Index endpoint now accepts POSTfrom application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET', 'POST'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
| <commit_before>from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
<commit_msg>Index endpoint now accepts POST<commit_after>from application import app, db, bcrypt, login_manager
from utils import render_template, user_loader
from twitter_explorer.handlers import login, index, config
login_manager.login_view = 'login'
login_manager.user_loader(user_loader)
# Routes.
app.add_url_rule('/login', 'login', login.login, methods=['GET', 'POST'])
app.add_url_rule('/signup', 'signup', login.register, methods=['GET', 'POST'])
app.add_url_rule('/logout', 'logout', login.logout, methods=['GET'])
app.add_url_rule('/config', 'config', config.config, methods=['GET', 'POST'])
app.add_url_rule('/', 'index', index.index, methods=['GET', 'POST'])
# Debug only.
if __name__ == '__main__':
host = app.config.get('HOST', '127.0.0.1')
port = app.config.get('PORT', 5000)
app.run(host=host, port=port, debug=True)
|
8b3ad1ea09c1fd8f2d433e79497eb51d9f32f4c7 | osversion.py | osversion.py | #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-07-06T22:54:44+0200
from ctypes import CDLL
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
| #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-08-19T14:18:16+0200
"""Print the __FreeBSD_version. This is also called OSVERSION in scripts."""
from ctypes import CDLL
import sys
if 'freebsd' not in sys.platform:
print('This script only works on FreeBSD!')
sys.exit(1)
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
| Print a warning and exit when not run on FreeBSD. | Print a warning and exit when not run on FreeBSD.
| Python | mit | rsmith-nl/scripts,rsmith-nl/scripts | #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-07-06T22:54:44+0200
from ctypes import CDLL
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
Print a warning and exit when not run on FreeBSD. | #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-08-19T14:18:16+0200
"""Print the __FreeBSD_version. This is also called OSVERSION in scripts."""
from ctypes import CDLL
import sys
if 'freebsd' not in sys.platform:
print('This script only works on FreeBSD!')
sys.exit(1)
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
| <commit_before>#!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-07-06T22:54:44+0200
from ctypes import CDLL
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
<commit_msg>Print a warning and exit when not run on FreeBSD.<commit_after> | #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-08-19T14:18:16+0200
"""Print the __FreeBSD_version. This is also called OSVERSION in scripts."""
from ctypes import CDLL
import sys
if 'freebsd' not in sys.platform:
print('This script only works on FreeBSD!')
sys.exit(1)
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
| #!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-07-06T22:54:44+0200
from ctypes import CDLL
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
Print a warning and exit when not run on FreeBSD.#!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-08-19T14:18:16+0200
"""Print the __FreeBSD_version. This is also called OSVERSION in scripts."""
from ctypes import CDLL
import sys
if 'freebsd' not in sys.platform:
print('This script only works on FreeBSD!')
sys.exit(1)
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
| <commit_before>#!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-07-06T22:54:44+0200
from ctypes import CDLL
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
<commit_msg>Print a warning and exit when not run on FreeBSD.<commit_after>#!/usr/bin/env python3
# file: osversion.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <rsmith@xs4all.nl>
# Created: 2018-04-06 22:34:00 +0200
# Last modified: 2018-08-19T14:18:16+0200
"""Print the __FreeBSD_version. This is also called OSVERSION in scripts."""
from ctypes import CDLL
import sys
if 'freebsd' not in sys.platform:
print('This script only works on FreeBSD!')
sys.exit(1)
with open('/usr/include/osreldate.h') as h:
lines = h.readlines()
line = [ln for ln in lines if ln.startswith('#define')][0]
print('Compilation environment version:', line.split()[-1])
libc = CDLL("/lib/libc.so.7")
print('Execution environment version:', libc.getosreldate())
|
d17f14b693d770618de559bd71a277c3771ccc8e | src/events/views.py | src/events/views.py | from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
| from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
permanent = True
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
| Make sponsored event redirect permanent (HTTP 301) | Make sponsored event redirect permanent (HTTP 301)
| Python | mit | pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016 | from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
Make sponsored event redirect permanent (HTTP 301) | from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
permanent = True
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
| <commit_before>from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
<commit_msg>Make sponsored event redirect permanent (HTTP 301)<commit_after> | from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
permanent = True
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
| from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
Make sponsored event redirect permanent (HTTP 301)from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
permanent = True
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
| <commit_before>from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
<commit_msg>Make sponsored event redirect permanent (HTTP 301)<commit_after>from django.http import Http404
from django.views.generic import DetailView, ListView, RedirectView
from proposals.models import TalkProposal
from .models import SponsoredEvent
class AcceptedTalkMixin:
queryset = (
TalkProposal.objects
.filter(accepted=True)
.select_related('submitter')
.order_by('title')
)
class TalkListView(AcceptedTalkMixin, ListView):
template_name = 'events/talk_list.html'
def get_context_data(self, **kwargs):
sponsored_events = (
SponsoredEvent.objects
.select_related('host')
.order_by('title')
)
return super().get_context_data(
sponsored_events=sponsored_events,
**kwargs
)
class TalkDetailView(AcceptedTalkMixin, DetailView):
template_name = 'events/talk_detail.html'
class SponsoredEventRedirectView(RedirectView):
permanent = True
def get_redirect_url(self, pk):
try:
event = SponsoredEvent.objects.get(pk=pk)
except SponsoredEvent.DoesNotExist:
raise Http404
return event.get_absolute_url()
class SponsoredEventDetailView(DetailView):
model = SponsoredEvent
template_name = 'events/sponsored_event_detail.html'
def get_queryset(self):
"""Fetch user relation before-hand because we'll need it.
"""
return super().get_queryset().select_related('host')
|
8cbac87d73f361bd6d623cbe58d188dd9cc518ce | ext_pylib/input/__init__.py | ext_pylib/input/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 3 input if possible
try:
INPUT = input
except NameError:
INPUT = raw_input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 2 input unless raw_input doesn't exist
try:
INPUT = raw_input
except NameError:
INPUT = input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
| Use raw_input [py2] first, then resort to input [py3]. | BUGFIX: Use raw_input [py2] first, then resort to input [py3].
| Python | mit | hbradleyiii/ext_pylib | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 3 input if possible
try:
INPUT = input
except NameError:
INPUT = raw_input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
BUGFIX: Use raw_input [py2] first, then resort to input [py3]. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 2 input unless raw_input doesn't exist
try:
INPUT = raw_input
except NameError:
INPUT = input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 3 input if possible
try:
INPUT = input
except NameError:
INPUT = raw_input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
<commit_msg>BUGFIX: Use raw_input [py2] first, then resort to input [py3].<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 2 input unless raw_input doesn't exist
try:
INPUT = raw_input
except NameError:
INPUT = input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 3 input if possible
try:
INPUT = input
except NameError:
INPUT = raw_input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
BUGFIX: Use raw_input [py2] first, then resort to input [py3].#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 2 input unless raw_input doesn't exist
try:
INPUT = raw_input
except NameError:
INPUT = input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 3 input if possible
try:
INPUT = input
except NameError:
INPUT = raw_input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
<commit_msg>BUGFIX: Use raw_input [py2] first, then resort to input [py3].<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
ext_pylib.input
~~~~~~~~~~~~~~~~
Functions for displaying and handling input on the terminal.
"""
from __future__ import absolute_import
# Use Python 2 input unless raw_input doesn't exist
try:
INPUT = raw_input
except NameError:
INPUT = input
# pylint: disable=wrong-import-position
# this import MUST be after INPUT is defined
from .prompts import prompt, prompt_str, warn_prompt
|
99354752661828451732fdf317c5f2742a86733e | api/base/exceptions.py | api/base/exceptions.py |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {key: reason}})
else:
errors.append({'source': {key: value}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
else:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': value})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
| Change source to point to which part of the request caused the error, and add the specific reason to detail | Change source to point to which part of the request caused the error, and add the specific reason to detail
| Python | apache-2.0 | aaxelb/osf.io,brianjgeiger/osf.io,felliott/osf.io,chennan47/osf.io,mluo613/osf.io,crcresearch/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,ticklemepierce/osf.io,kwierman/osf.io,samchrisinger/osf.io,brianjgeiger/osf.io,icereval/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,erinspace/osf.io,arpitar/osf.io,arpitar/osf.io,abought/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,baylee-d/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,felliott/osf.io,emetsger/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,abought/osf.io,hmoco/osf.io,asanfilippo7/osf.io,arpitar/osf.io,caseyrollins/osf.io,jnayak1/osf.io,GageGaskins/osf.io,alexschiller/osf.io,pattisdr/osf.io,acshi/osf.io,RomanZWang/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,acshi/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,amyshi188/osf.io,Johnetordoff/osf.io,erinspace/osf.io,Nesiehr/osf.io,cslzchen/osf.io,abought/osf.io,brianjgeiger/osf.io,chennan47/osf.io,SSJohns/osf.io,samchrisinger/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,saradbowman/osf.io,wearpants/osf.io,njantrania/osf.io,DanielSBrown/osf.io,mluke93/osf.io,cosenal/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,jnayak1/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,petermalcolm/osf.io,laurenrevere/osf.io,icereval/osf.io,KAsante95/osf.io,zamattiac/osf.io,felliott/osf.io,jnayak1/osf.io,jnayak1/osf.io,brandonPurvis/osf.io,kwierman/osf.io,samanehsan/osf.io,hmoco/osf.io,adlius/osf.io,billyhunt/osf.io,mattclark/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,kch8qx/osf.io,mluo613/osf.io,monikagrabowska/osf.io,haoyuchen1992/osf.io,baylee-d/osf.io,samanehsan/osf.io,mluke93/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,Ghalko/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,SSJohns/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,petermalcolm/osf.io,T
omHeatwole/osf.io,SSJohns/osf.io,pattisdr/osf.io,ZobairAlijan/osf.io,TomBaxter/osf.io,chrisseto/osf.io,emetsger/osf.io,zamattiac/osf.io,doublebits/osf.io,mfraezz/osf.io,emetsger/osf.io,Ghalko/osf.io,billyhunt/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,KAsante95/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,acshi/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,Johnetordoff/osf.io,danielneis/osf.io,Ghalko/osf.io,Ghalko/osf.io,mluo613/osf.io,sloria/osf.io,njantrania/osf.io,pattisdr/osf.io,kch8qx/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,amyshi188/osf.io,chennan47/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,TomBaxter/osf.io,TomBaxter/osf.io,danielneis/osf.io,amyshi188/osf.io,mfraezz/osf.io,ZobairAlijan/osf.io,Nesiehr/osf.io,zamattiac/osf.io,acshi/osf.io,wearpants/osf.io,rdhyee/osf.io,alexschiller/osf.io,zachjanicki/osf.io,amyshi188/osf.io,binoculars/osf.io,mluo613/osf.io,aaxelb/osf.io,kch8qx/osf.io,njantrania/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,danielneis/osf.io,wearpants/osf.io,cslzchen/osf.io,danielneis/osf.io,sloria/osf.io,adlius/osf.io,binoculars/osf.io,saradbowman/osf.io,mattclark/osf.io,caneruguz/osf.io,arpitar/osf.io,aaxelb/osf.io,doublebits/osf.io,crcresearch/osf.io,doublebits/osf.io,mattclark/osf.io,cosenal/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,adlius/osf.io,alexschiller/osf.io,crcresearch/osf.io,alexschiller/osf.io,leb2dg/osf.io,GageGaskins/osf.io,zamattiac/osf.io,chrisseto/osf.io,kch8qx/osf.io,ticklemepierce/osf.io,DanielSBrown/osf.io,icereval/osf.io,laurenrevere/osf.io,cosenal/osf.io,DanielSBrown/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,petermalcolm/osf.io,wearpants/osf.io,doublebits/osf.io,billyhunt/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,erinspace/osf.io,mluke93/osf.io,kwierman/osf.io,alexschiller/osf.io,hmoco/osf.io,chrisseto/osf.io,mfraezz/osf.io,TomHeatwole/osf.io,HalcyonChimera/osf.io,cosenal/osf.io,emetsger/osf.io,zach
janicki/osf.io,caseyrygt/osf.io,adlius/osf.io,caneruguz/osf.io,binoculars/osf.io,brandonPurvis/osf.io,sloria/osf.io,samanehsan/osf.io,baylee-d/osf.io,rdhyee/osf.io,abought/osf.io,cslzchen/osf.io,felliott/osf.io,mfraezz/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,kwierman/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,samchrisinger/osf.io,mluke93/osf.io,rdhyee/osf.io,rdhyee/osf.io,njantrania/osf.io,KAsante95/osf.io,caseyrollins/osf.io,caneruguz/osf.io,samanehsan/osf.io,leb2dg/osf.io,aaxelb/osf.io,mluo613/osf.io |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {key: reason}})
else:
errors.append({'source': {key: value}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
Change source to point to which part of the request caused the error, and add the specific reason to detail |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
else:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': value})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
| <commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {key: reason}})
else:
errors.append({'source': {key: value}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
<commit_msg>Change source to point to which part of the request caused the error, and add the specific reason to detail<commit_after> |
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
else:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': value})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {key: reason}})
else:
errors.append({'source': {key: value}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
Change source to point to which part of the request caused the error, and add the specific reason to detail
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
else:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': value})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
| <commit_before>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {key: reason}})
else:
errors.append({'source': {key: value}})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
<commit_msg>Change source to point to which part of the request caused the error, and add the specific reason to detail<commit_after>
from rest_framework import status
from rest_framework.exceptions import APIException
def json_api_exception_handler(exc, context):
""" Custom exception handler that returns errors object as an array """
# Import inside method to avoid errors when the OSF is loaded without Django
from rest_framework.views import exception_handler
response = exception_handler(exc, context)
# Error objects may have the following members. Title removed to avoid clash with node "title" errors.
top_level_error_keys = ['id', 'links', 'status', 'code', 'detail', 'source', 'meta']
errors = []
if response:
message = response.data
if isinstance(message, dict):
for key, value in message.iteritems():
if key in top_level_error_keys:
errors.append({key: value})
else:
if isinstance(value, list):
for reason in value:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': reason})
else:
errors.append({'source': {'pointer': '/data/attributes/' + key}, 'detail': value})
elif isinstance(message, (list, tuple)):
for error in message:
errors.append({'detail': error})
else:
errors.append({'detail': message})
response.data = {'errors': errors}
return response
# Custom Exceptions the Django Rest Framework does not support
class Gone(APIException):
status_code = status.HTTP_410_GONE
default_detail = ('The requested resource is no longer available.')
|
0667b4b1c8660feb2c1bdf09562fe51700db620b | app/models/__init__.py | app/models/__init__.py | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
| # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
UNKNOWN_STATUS = 'UNKNOWN'
# Build file names.
DONE_FILE = '.done'
BUILD_META_FILE = 'build.meta'
BUILD_FAIL_FILE = 'build.FAIL'
BUILD_PASS_FILE = 'build.PASS'
| Add new status (unknown), and necessary build files. | Add new status (unknown), and necessary build files.
| Python | lgpl-2.1 | kernelci/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend,joyxu/kernelci-backend | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
Add new status (unknown), and necessary build files. | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
UNKNOWN_STATUS = 'UNKNOWN'
# Build file names.
DONE_FILE = '.done'
BUILD_META_FILE = 'build.meta'
BUILD_FAIL_FILE = 'build.FAIL'
BUILD_PASS_FILE = 'build.PASS'
| <commit_before># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
<commit_msg>Add new status (unknown), and necessary build files.<commit_after> | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
UNKNOWN_STATUS = 'UNKNOWN'
# Build file names.
DONE_FILE = '.done'
BUILD_META_FILE = 'build.meta'
BUILD_FAIL_FILE = 'build.FAIL'
BUILD_PASS_FILE = 'build.PASS'
| # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
Add new status (unknown), and necessary build files.# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
UNKNOWN_STATUS = 'UNKNOWN'
# Build file names.
DONE_FILE = '.done'
BUILD_META_FILE = 'build.meta'
BUILD_FAIL_FILE = 'build.FAIL'
BUILD_PASS_FILE = 'build.PASS'
| <commit_before># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
<commit_msg>Add new status (unknown), and necessary build files.<commit_after># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The default mongodb database name.
DB_NAME = 'kernel-ci'
# The default ID key for mongodb documents.
ID_KEY = '_id'
# Job and/or build status.
BUILDING_STATUS = 'BUILDING'
DONE_STATUS = 'DONE'
FAILED_STATUS = 'FAILED'
SUCCESS_STATUS = 'SUCCESS'
UNKNOWN_STATUS = 'UNKNOWN'
# Build file names.
DONE_FILE = '.done'
BUILD_META_FILE = 'build.meta'
BUILD_FAIL_FILE = 'build.FAIL'
BUILD_PASS_FILE = 'build.PASS'
|
624ded5833e25d6a51671255fd32208ff947b22f | sconsole/manager.py | sconsole/manager.py | # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body(),
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
| # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body_frame = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body_frame,
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
| Fix refs to body of sconsole | Fix refs to body of sconsole
| Python | apache-2.0 | saltstack/salt-console | # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body(),
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
Fix refs to body of sconsole | # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body_frame = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body_frame,
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
| <commit_before># Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body(),
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
<commit_msg>Fix refs to body of sconsole<commit_after> | # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body_frame = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body_frame,
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
| # Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body(),
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
Fix refs to body of sconsole# Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body_frame = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body_frame,
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
| <commit_before># Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body(),
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
<commit_msg>Fix refs to body of sconsole<commit_after># Import third party libs
import urwid
# Import sconsole libs
import sconsole.cmdbar
import sconsole.static
FOOTER = [
('title', 'Salt Console'), ' ',
('key', 'UP'), ' ',
('key', 'DOWN'), ' ']
class Manager(object):
def __init__(self, opts):
self.opts = opts
self.cmdbar = sconsole.cmdbar.CommandBar(self.opts)
self.header = urwid.LineBox(urwid.Text(('banner', 'Salt Console'), align='center'))
self.body_frame = self.body()
self.footer = urwid.AttrMap(urwid.Text(FOOTER), 'banner')
self.view = urwid.Frame(
body=self.body_frame,
header=self.header,
footer=self.footer)
def body(self):
dump = urwid.Filler(urwid.Text('', align='left'), valign='top')
return urwid.Frame(dump, header=self.cmdbar.grid)
def unhandled_input(self, key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
def start(self):
palette = sconsole.static.get_palette(
self.opts.get('theme', 'std')
)
loop = urwid.MainLoop(
self.view,
palette=palette,
unhandled_input=self.unhandled_input)
loop.run()
|
5b0e750c70759f79e6ea5dce051f1c29726ac71c | scoreboard/tests.py | scoreboard/tests.py | from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
    """Unit tests for Game score helpers."""

    @classmethod
    def setUpClass(cls):
        # Shared fixture: two teams and one game with no goals recorded yet.
        super(GameUnitTests, cls).setUpClass()
        cls.home_team = Team.objects.create(name='Home')
        cls.visiting_team = Team.objects.create(name='Away')
        cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())

    def test_home_score_zero(self):
        # A game with no goals scores 0 for the home side.
        self.assertEqual(0, self.game.home_score())

    def test_away_score_zero(self):
        # BUG FIX: the expected value was 1, but no goal has been created,
        # so the visiting score must be 0 (mirrors test_home_score_zero).
        self.assertEqual(0, self.game.visiting_score())
| from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
    """Exercise Game.home_score()/visiting_score() with and without goals."""

    @classmethod
    def setUpClass(cls):
        super(GameUnitTests, cls).setUpClass()
        # One game between two fresh teams; individual tests add goals.
        cls.home_team = Team.objects.create(name='Home')
        cls.visiting_team = Team.objects.create(name='Away')
        cls.game = Game.objects.create(home_team=cls.home_team,
                                       visiting_team=cls.visiting_team,
                                       time=tz.now())

    def _score(self, team):
        # Record a goal for *team* at 1:01 by player number 26.
        Goal.objects.create(game=self.game,
                            game_time_minutes=1,
                            game_time_seconds=1,
                            team=team,
                            player_number=26)

    def test_home_score_zero(self):
        self.assertEqual(self.game.home_score(), 0)

    def test_away_score_zero(self):
        self.assertEqual(self.game.visiting_score(), 0)

    def test_home_score_one(self):
        self._score(self.home_team)
        self.assertEqual(self.game.home_score(), 1)
        self.assertEqual(self.game.visiting_score(), 0)

    def test_away_score_one(self):
        self._score(self.visiting_team)
        self.assertEqual(self.game.home_score(), 0)
        self.assertEqual(self.game.visiting_score(), 1)
| Python | mit | jspitzen/lacrosse_scoreboard,jspitzen/lacrosse_scoreboard | from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(1, self.game.visiting_score())
Fix failing test and add 2 more | from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(0, self.game.visiting_score())
def test_home_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.home_team,
player_number=26
)
self.assertEqual(1, self.game.home_score())
self.assertEqual(0, self.game.visiting_score())
def test_away_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.visiting_team,
player_number=26
)
self.assertEqual(0, self.game.home_score())
self.assertEqual(1, self.game.visiting_score())
| <commit_before>from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(1, self.game.visiting_score())
<commit_msg>Fix failing test and add 2 more<commit_after> | from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(0, self.game.visiting_score())
def test_home_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.home_team,
player_number=26
)
self.assertEqual(1, self.game.home_score())
self.assertEqual(0, self.game.visiting_score())
def test_away_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.visiting_team,
player_number=26
)
self.assertEqual(0, self.game.home_score())
self.assertEqual(1, self.game.visiting_score())
| from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(1, self.game.visiting_score())
Fix failing test and add 2 morefrom django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(0, self.game.visiting_score())
def test_home_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.home_team,
player_number=26
)
self.assertEqual(1, self.game.home_score())
self.assertEqual(0, self.game.visiting_score())
def test_away_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.visiting_team,
player_number=26
)
self.assertEqual(0, self.game.home_score())
self.assertEqual(1, self.game.visiting_score())
| <commit_before>from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(1, self.game.visiting_score())
<commit_msg>Fix failing test and add 2 more<commit_after>from django.test import TestCase
from django.utils import timezone as tz
from scoreboard.models import *
class GameUnitTests(TestCase):
@classmethod
def setUpClass(cls):
super(GameUnitTests, cls).setUpClass()
cls.home_team = Team.objects.create(name='Home')
cls.visiting_team = Team.objects.create(name='Away')
cls.game = Game.objects.create(home_team=cls.home_team, visiting_team=cls.visiting_team, time=tz.now())
def test_home_score_zero(self):
self.assertEqual(0, self.game.home_score())
def test_away_score_zero(self):
self.assertEqual(0, self.game.visiting_score())
def test_home_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.home_team,
player_number=26
)
self.assertEqual(1, self.game.home_score())
self.assertEqual(0, self.game.visiting_score())
def test_away_score_one(self):
Goal.objects.create(game=self.game,
game_time_minutes=1,
game_time_seconds=1,
team=self.visiting_team,
player_number=26
)
self.assertEqual(0, self.game.home_score())
self.assertEqual(1, self.game.visiting_score())
|
a72b75f67753985a4559d4675393042d9d363a02 | media/sites/lapinkansa.py | media/sites/lapinkansa.py | import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='Teksti' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
| import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
| Fix to new page layout | Fix to new page layout
| Python | mit | HIIT/digivaalit-2015,HIIT/digivaalit-2015,HIIT/digivaalit-2015 | import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='Teksti' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
Fix to new page layout | import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
| <commit_before>import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='Teksti' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
<commit_msg>Fix to new page layout<commit_after> | import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
| import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='Teksti' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
Fix to new page layoutimport requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
| <commit_before>import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='Teksti' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
<commit_msg>Fix to new page layout<commit_after>import requests
from bs4 import BeautifulSoup
def nouda( url , out ):
r = requests.get( url )
r.encoding = 'UTF-8'
soup = BeautifulSoup( r.text )
teksti = soup.find_all( class_ ='post-meta' )
for string in teksti[0].stripped_strings:
out.write( string.encode('utf8') + ' ' )
if __name__ == '__main__':
nouda("http://www.lapinkansa.fi/Lappi/1194944697007/artikkeli/kaunis+tykky+voi+olla+kavala+puille.html", file('lapinkansa.txt', 'w'))
|
9387dfd4cc39fa6fbbf66147ced880dffa6408bd | keystone/server/flask/__init__.py | keystone/server/flask/__init__.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Re-export the flask server entry points so callers can import them from
# keystone.server.flask directly.
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa

# NOTE(review): 'core', 'initialize_application' and 'setup_app_middleware'
# are presumably supplied by the star import above -- confirm against
# keystone.server.flask.core before relying on this __all__.
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
           'setup_app_middleware')

# Convenience alias so consumers can call flask.fail_gracefully directly.
fail_gracefully = application.fail_gracefully
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(morgan): Import relevant stuff so importing individual under-pinnings
# isn't needed, keystone.server.flask exposes all the interesting bits
# needed to develop restful APIs for keystone.
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
| Make keystone.server.flask more interesting for importing | Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also been set up in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504
| Python | apache-2.0 | openstack/keystone,openstack/keystone,mahak/keystone,openstack/keystone,mahak/keystone,mahak/keystone | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also be setup in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(morgan): Import relevant stuff so importing individual under-pinnings
# isn't needed, keystone.server.flask exposes all the interesting bits
# needed to develop restful APIs for keystone.
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
<commit_msg>Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also be setup in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(morgan): Import relevant stuff so importing individual under-pinnings
# isn't needed, keystone.server.flask exposes all the interesting bits
# needed to develop restful APIs for keystone.
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also be setup in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(morgan): Import relevant stuff so importing individual under-pinnings
# isn't needed, keystone.server.flask exposes all the interesting bits
# needed to develop restful APIs for keystone.
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystone.server.flask.core import * # noqa
from keystone.server.flask import application # noqa
__all__ = ('application', 'core', 'fail_gracefully', 'initialize_application',
'setup_app_middleware')
fail_gracefully = application.fail_gracefully
<commit_msg>Make keystone.server.flask more interesting for importing
Importing keystone.server.flask now exposes all the relevant bits
from the sub modules to develop APIs without needing to understand
all the underlying modules. __all__ has also be setup in a meaningful
way to allow for `from keystone.server.flask import *` and have
all the needed objects to start developing APIs for keystone.
Change-Id: Iab22cabb71c6690e6ffb0c9de68ed8437c4848de
Partial-Bug: #1776504<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(morgan): Import relevant stuff so importing individual under-pinnings
# isn't needed, keystone.server.flask exposes all the interesting bits
# needed to develop restful APIs for keystone.
from keystone.server.flask import application
from keystone.server.flask.common import APIBase # noqa
from keystone.server.flask.common import base_url # noqa
from keystone.server.flask.common import construct_json_home_data # noqa
from keystone.server.flask.common import construct_resource_map # noqa
from keystone.server.flask.common import full_url # noqa
from keystone.server.flask.common import JsonHomeData # noqa
from keystone.server.flask.common import ResourceBase # noqa
from keystone.server.flask.common import ResourceMap # noqa
from keystone.server.flask.core import * # noqa
# NOTE(morgan): This allows for from keystone.flask import * and have all the
# cool stuff needed to develop new APIs within a module/subsystem
__all__ = ('APIBase', 'JsonHomeData', 'ResourceBase', 'ResourceMap',
'base_url', 'construct_json_home_data', 'construct_resource_map',
'full_url', 'fail_gracefully')
application_factory = application.application_factory
fail_gracefully = application.fail_gracefully
|
db374d1cc7188b4102ff727d5edfe458b6533584 | markdown_for_squirrel/__init__.py | markdown_for_squirrel/__init__.py | import markdown
def markdownify_content(context):
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
| import markdown
import helpers
logger = helpers.get_logger(__name__)
def markdownify_content(context):
if context["is_called_from_cli"]:
arg_parser = context["arg_parser"]
args = arg_parser.parse_args()
if args.action == "generate":
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
logger.debug("Parsing content to Markdown for `{}` article..."
.format(article["title"]))
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
| Fix markdown plugin so it's run only when needed | Fix markdown plugin so it's run only when needed
| Python | mit | daGrevis/squirrel | import markdown
def markdownify_content(context):
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
Fix markdown plugin so it's run only when needed | import markdown
import helpers
logger = helpers.get_logger(__name__)
def markdownify_content(context):
if context["is_called_from_cli"]:
arg_parser = context["arg_parser"]
args = arg_parser.parse_args()
if args.action == "generate":
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
logger.debug("Parsing content to Markdown for `{}` article..."
.format(article["title"]))
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
| <commit_before>import markdown
def markdownify_content(context):
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
<commit_msg>Fix markdown plugin so it's run only when needed<commit_after> | import markdown
import helpers
logger = helpers.get_logger(__name__)
def markdownify_content(context):
if context["is_called_from_cli"]:
arg_parser = context["arg_parser"]
args = arg_parser.parse_args()
if args.action == "generate":
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
logger.debug("Parsing content to Markdown for `{}` article..."
.format(article["title"]))
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
| import markdown
def markdownify_content(context):
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
Fix markdown plugin so it's run only when neededimport markdown
import helpers
logger = helpers.get_logger(__name__)
def markdownify_content(context):
if context["is_called_from_cli"]:
arg_parser = context["arg_parser"]
args = arg_parser.parse_args()
if args.action == "generate":
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
logger.debug("Parsing content to Markdown for `{}` article..."
.format(article["title"]))
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
| <commit_before>import markdown
def markdownify_content(context):
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
<commit_msg>Fix markdown plugin so it's run only when needed<commit_after>import markdown
import helpers
logger = helpers.get_logger(__name__)
def markdownify_content(context):
if context["is_called_from_cli"]:
arg_parser = context["arg_parser"]
args = arg_parser.parse_args()
if args.action == "generate":
for article in context["articles"]:
article["content"] = markdown.markdown(article["content"])
logger.debug("Parsing content to Markdown for `{}` article..."
.format(article["title"]))
return context
def inject_middlewares(middlewares):
middlewares.add("markdownify_content", markdownify_content)
return middlewares
|
0245952bacb6a2c7cd20fb7758d90301776c4ab4 | armstrong/apps/embedded_video/models.py | armstrong/apps/embedded_video/models.py | from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.contrib.sites.models import Site
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
sites = models.ManyToManyField(Site)
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
| from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
| Remove sites from explicitly being defined (thanks @niran for pointing it out) | Remove sites from explicitly being defined (thanks @niran for pointing it out)
| Python | apache-2.0 | armstrong/armstrong.apps.embedded_video | from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.contrib.sites.models import Site
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
sites = models.ManyToManyField(Site)
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
Remove sites from explicitly being defined (thanks @niran for pointing it out) | from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
| <commit_before>from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.contrib.sites.models import Site
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
sites = models.ManyToManyField(Site)
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
<commit_msg>Remove sites from explicitly being defined (thanks @niran for pointing it out)<commit_after> | from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
| from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.contrib.sites.models import Site
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
sites = models.ManyToManyField(Site)
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
Remove sites from explicitly being defined (thanks @niran for pointing it out)from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
| <commit_before>from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.contrib.sites.models import Site
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
sites = models.ManyToManyField(Site)
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
<commit_msg>Remove sites from explicitly being defined (thanks @niran for pointing it out)<commit_after>from armstrong.apps.content.models import Content
from armstrong.core.arm_content.mixins import AuthorsMixin
from armstrong.core.arm_content.mixins import EmbeddedVideoMixin
from armstrong.core.arm_content.fields import EmbeddedVideoField
from django.db import models
from . import settings
class EmbeddedVideoBase(Content, EmbeddedVideoMixin, models.Model):
aspect_ratio = models.CharField(max_length=5,
choices=settings.ASPECT_RATIOS,
default=settings.DEFAULT_ASPECT_RATIO)
# TODO: screen cap thumbnail
screencap_url = models.URLField()
class Meta:
abstract = True
class EmbeddedVideo(EmbeddedVideoBase):
pass
|
31ea614e783273ef14919d1628a7ada11e8850fd | apps/users/adapters.py | apps/users/adapters.py | import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send_sync(
user,
template_name=template_prefix,
**context
)
| import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email, SyncEmailMixin):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send(
user,
template_name=template_prefix,
**context
)
| Use SyncEmailMixin for account mails | Use SyncEmailMixin for account mails
| Python | agpl-3.0 | liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin,liqd/a4-meinberlin | import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send_sync(
user,
template_name=template_prefix,
**context
)
Use SyncEmailMixin for account mails | import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email, SyncEmailMixin):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send(
user,
template_name=template_prefix,
**context
)
| <commit_before>import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send_sync(
user,
template_name=template_prefix,
**context
)
<commit_msg>Use SyncEmailMixin for account mails<commit_after> | import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email, SyncEmailMixin):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send(
user,
template_name=template_prefix,
**context
)
| import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send_sync(
user,
template_name=template_prefix,
**context
)
Use SyncEmailMixin for account mailsimport re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email, SyncEmailMixin):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send(
user,
template_name=template_prefix,
**context
)
| <commit_before>import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send_sync(
user,
template_name=template_prefix,
**context
)
<commit_msg>Use SyncEmailMixin for account mails<commit_after>import re
from allauth.account.adapter import DefaultAccountAdapter
from django.conf import settings
from adhocracy4.emails.mixins import SyncEmailMixin
from apps.contrib.emails import Email
from apps.users import USERNAME_INVALID_MESSAGE
from apps.users import USERNAME_REGEX
class UserAccountEmail(Email, SyncEmailMixin):
def get_receivers(self):
return [self.object]
@property
def template_name(self):
return self.kwargs['template_name']
def get_context(self):
context = super().get_context()
context['contact_email'] = settings.CONTACT_EMAIL
return context
class AccountAdapter(DefaultAccountAdapter):
username_regex = re.compile(USERNAME_REGEX)
error_messages = dict(
DefaultAccountAdapter.error_messages,
invalid_username=USERNAME_INVALID_MESSAGE
)
def send_mail(self, template_prefix, email, context):
user = context['user']
return UserAccountEmail.send(
user,
template_name=template_prefix,
**context
)
|
f22476a36f2096628dc336f9adf0caa9a827dc11 | jfr_playoff/db.py | jfr_playoff/db.py | import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
| import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(e.errno, str(e), db_name)
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
| Fix for rethrowing mysql.connector.Error as IOError | Fix for rethrowing mysql.connector.Error as IOError
| Python | bsd-2-clause | emkael/jfrteamy-playoff,emkael/jfrteamy-playoff | import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
Fix for rethrowing mysql.connector.Error as IOError | import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(e.errno, str(e), db_name)
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
| <commit_before>import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
<commit_msg>Fix for rethrowing mysql.connector.Error as IOError<commit_after> | import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(e.errno, str(e), db_name)
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
| import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
Fix for rethrowing mysql.connector.Error as IOErrorimport sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(e.errno, str(e), db_name)
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
| <commit_before>import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
<commit_msg>Fix for rethrowing mysql.connector.Error as IOError<commit_after>import sys
class PlayoffDB(object):
db_cursor = None
DATABASE_NOT_CONFIGURED_WARNING = 'WARNING: database not configured'
def __init__(self, settings):
reload(sys)
sys.setdefaultencoding("latin1")
import mysql.connector
self.database = mysql.connector.connect(
user=settings['user'],
password=settings['pass'],
host=settings['host'],
port=settings['port'])
self.db_cursor = self.database.cursor(buffered=True)
def get_cursor(self):
return self.db_cursor
def __execute_query(self, db_name, sql, params):
self.db_cursor.execute(sql.replace('#db#', db_name), params)
def fetch(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
row = self.db_cursor.fetchone()
return row
except mysql.connector.Error as e:
raise IOError(e.errno, str(e), db_name)
def fetch_all(self, db_name, sql, params):
import mysql.connector
try:
self.__execute_query(db_name, sql, params)
results = self.db_cursor.fetchall()
return results
except mysql.connector.Error as e:
raise IOError(
message=str(e), filename=db_name,
errno=e.errno, strerror=str(e))
|
4b719f5c1f5e2e8923ce9d0fca4ffe849bf9ea79 | opps/article/urls.py | opps/article/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'^(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
| Fix article url open new | Fix article url open new
| Python | mit | opps/opps,YACOWS/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,williamroot/opps,jeanmask/opps,jeanmask/opps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'^(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
Fix article url open new | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'^(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
<commit_msg>Fix article url open new<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'^(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
Fix article url open new#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'^(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
<commit_msg>Fix article url open new<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
from django.conf.urls.defaults import patterns, url
from django.conf.urls import include
from opps.article.views import OppsDetail
urlpatterns = patterns('',
url(r'^redactor/', include('redactor.urls')),
url(r'(?P<channel__slug_name>[0-9A-Za-z-_.//]+)/(?P<slug>[\w-]+)$',
OppsDetail.as_view(), name='home'),
)
|
e3b0ccb529dca19bb3882f9caad82dbd965c9ae0 | onnx/__init__.py | onnx/__init__.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has to implement fileno that returns a file descriptor)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if isinstance(obj, str) or (sys.version_info[0] == 2 and
isinstance(obj, unicode_literals.unicode_or_str)):
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
else:
model.ParseFromString(obj.read())
return model
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| Fix string/file-like object detection in onnx.load | Fix string/file-like object detection in onnx.load
| Python | apache-2.0 | onnx/onnx,onnx/onnx,onnx/onnx,onnx/onnx | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has to implement fileno that returns a file descriptor)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if isinstance(obj, str) or (sys.version_info[0] == 2 and
isinstance(obj, unicode_literals.unicode_or_str)):
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
else:
model.ParseFromString(obj.read())
return model
Fix string/file-like object detection in onnx.load | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has to implement fileno that returns a file descriptor)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if isinstance(obj, str) or (sys.version_info[0] == 2 and
isinstance(obj, unicode_literals.unicode_or_str)):
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
else:
model.ParseFromString(obj.read())
return model
<commit_msg>Fix string/file-like object detection in onnx.load<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has to implement fileno that returns a file descriptor)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if isinstance(obj, str) or (sys.version_info[0] == 2 and
isinstance(obj, unicode_literals.unicode_or_str)):
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
else:
model.ParseFromString(obj.read())
return model
Fix string/file-like object detection in onnx.loadfrom __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has to implement fileno that returns a file descriptor)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if isinstance(obj, str) or (sys.version_info[0] == 2 and
isinstance(obj, unicode_literals.unicode_or_str)):
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
else:
model.ParseFromString(obj.read())
return model
<commit_msg>Fix string/file-like object detection in onnx.load<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .onnx_pb2 import *
import sys
def load(obj):
'''
Loads a binary protobuf that stores onnx graph
@params
Takes a file-like object (has "read" function)
or a string containing a file name
@return ONNX ModelProto object
'''
model = ModelProto()
if hasattr(obj, 'read') and callable(obj.read):
model.ParseFromString(obj.read())
else:
with open(obj, 'rb') as f:
model.ParseFromString(f.read())
return model
|
c22528df06e821936590431db5ba1a424e16f6a0 | debug_toolbar/management/commands/debugsqlshell.py | debug_toolbar/management/commands/debugsqlshell.py | from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = datetime.now() - starttime
print sqlparse.format(raw_sql, reindent=True),
print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
print
util.CursorDebugWrapper = PrintQueryWrapper
| from __future__ import print_function
from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = ms_from_timedelta(datetime.now() - starttime)
formatted_sql = sqlparse.format(raw_sql, reindent=True)
print('%s [%.2fms]' % (formatted_sql, execution_time))
util.CursorDebugWrapper = PrintQueryWrapper
| Replace print statement by print function. | Replace print statement by print function.
| Python | bsd-3-clause | seperman/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,sidja/django-debug-toolbar,Endika/django-debug-toolbar,guilhermetavares/django-debug-toolbar,megcunningham/django-debug-toolbar,pevzi/django-debug-toolbar,peap/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,seperman/django-debug-toolbar,spookylukey/django-debug-toolbar,tim-schilling/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,megcunningham/django-debug-toolbar,megcunningham/django-debug-toolbar,spookylukey/django-debug-toolbar,barseghyanartur/django-debug-toolbar,ivelum/django-debug-toolbar,seperman/django-debug-toolbar,calvinpy/django-debug-toolbar,spookylukey/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,sidja/django-debug-toolbar,barseghyanartur/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,jazzband/django-debug-toolbar,stored/django-debug-toolbar,pevzi/django-debug-toolbar,peap/django-debug-toolbar,tim-schilling/django-debug-toolbar,Endika/django-debug-toolbar,pevzi/django-debug-toolbar,ivelum/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,peap/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,jazzband/django-debug-toolbar,jazzband/django-debug-toolbar,ivelum/django-debug-toolbar,calvinpy/django-debug-toolbar,Endika/django-debug-toolbar,sidja/django-debug-toolbar,calvinpy/django-debug-toolbar | from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = datetime.now() - starttime
print sqlparse.format(raw_sql, reindent=True),
print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
print
util.CursorDebugWrapper = PrintQueryWrapper
Replace print statement by print function. | from __future__ import print_function
from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = ms_from_timedelta(datetime.now() - starttime)
formatted_sql = sqlparse.format(raw_sql, reindent=True)
print('%s [%.2fms]' % (formatted_sql, execution_time))
util.CursorDebugWrapper = PrintQueryWrapper
| <commit_before>from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = datetime.now() - starttime
print sqlparse.format(raw_sql, reindent=True),
print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
print
util.CursorDebugWrapper = PrintQueryWrapper
<commit_msg>Replace print statement by print function.<commit_after> | from __future__ import print_function
from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = ms_from_timedelta(datetime.now() - starttime)
formatted_sql = sqlparse.format(raw_sql, reindent=True)
print('%s [%.2fms]' % (formatted_sql, execution_time))
util.CursorDebugWrapper = PrintQueryWrapper
| from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = datetime.now() - starttime
print sqlparse.format(raw_sql, reindent=True),
print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
print
util.CursorDebugWrapper = PrintQueryWrapper
Replace print statement by print function.from __future__ import print_function
from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = ms_from_timedelta(datetime.now() - starttime)
formatted_sql = sqlparse.format(raw_sql, reindent=True)
print('%s [%.2fms]' % (formatted_sql, execution_time))
util.CursorDebugWrapper = PrintQueryWrapper
| <commit_before>from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = datetime.now() - starttime
print sqlparse.format(raw_sql, reindent=True),
print ' [%.2fms]' % (ms_from_timedelta(execution_time),)
print
util.CursorDebugWrapper = PrintQueryWrapper
<commit_msg>Replace print statement by print function.<commit_after>from __future__ import print_function
from datetime import datetime
from django.db.backends import util
import sqlparse
from debug_toolbar.utils import ms_from_timedelta
class PrintQueryWrapper(util.CursorDebugWrapper):
def execute(self, sql, params=()):
starttime = datetime.now()
try:
return self.cursor.execute(sql, params)
finally:
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
execution_time = ms_from_timedelta(datetime.now() - starttime)
formatted_sql = sqlparse.format(raw_sql, reindent=True)
print('%s [%.2fms]' % (formatted_sql, execution_time))
util.CursorDebugWrapper = PrintQueryWrapper
|
371be140dfbecff72d72cda580cd299badc6bc15 | aws_list_all/client.py | aws_list_all/client.py | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| Use us-east-1 to query route53 | Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4.
| Python | mit | JohannesEbke/aws_list_all | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4. | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| <commit_before>import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
<commit_msg>Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4.<commit_after> | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4.import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| <commit_before>import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
<commit_msg>Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4.<commit_after>import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
|
945aba9548b92f57fc25f9996bfa9c3811e64deb | server/resources.py | server/resources.py | from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| Change single Lecture query to use first() in stead of all() | Change single Lecture query to use first() in stead of all()
| Python | mit | MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS | from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
Change single Lecture query to use first() in stead of all() | from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| <commit_before>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
<commit_msg>Change single Lecture query to use first() in stead of all()<commit_after> | from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
Change single Lecture query to use first() in stead of all()from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
| <commit_before>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
db_lectures = Lecture.query.filter(Lecture.id == lecture_id).all()
if not db_lectures:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
lecture = db_lectures[0]
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
<commit_msg>Change single Lecture query to use first() in stead of all()<commit_after>from flask import request
from flask_restful import Resource, Api, abort, reqparse
from .models import db, Comment, Lecture
api = Api()
class CommentListResource(Resource):
def get(self, lecture_id):
db_lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not db_lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
db_comments = Comment.query.filter(Comment.lecture_id == lecture_id)
comments = [
{'id': c.id, 'content': c.content}
for c in db_comments
]
return {
'comments': comments
}
def post(self, lecture_id):
lecture = Lecture.query.filter(Lecture.id == lecture_id).first()
if not lecture:
abort(404, message="Lecture {} does not exist".format(lecture_id))
parser = reqparse.RequestParser()
parser.add_argument('data', help='Text content of comment')
args = parser.parse_args()
if not args.data:
abort(400, message="Comment has no data parameter")
content = args.data
comment = Comment(content, lecture)
db.session.add(comment)
db.session.commit()
return {
'id': comment.id
}
api.add_resource(CommentListResource, '/api/0/lectures/<lecture_id>/comments')
|
8fb6f506bae11377ca3e3d040ce31d81eaa81d3e | localore/people/wagtail_hooks.py | localore/people/wagtail_hooks.py | from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
# pylint: disable=no-self-use
def full_name(self, obj):
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
wagtailmodeladmin_register(PeopleAdmin)
| from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
| Add profile photos to list of people. | Add profile photos to list of people.
| Python | mpl-2.0 | ghostwords/localore,ghostwords/localore,ghostwords/localore | from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
# pylint: disable=no-self-use
def full_name(self, obj):
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
wagtailmodeladmin_register(PeopleAdmin)
Add profile photos to list of people. | from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
| <commit_before>from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
# pylint: disable=no-self-use
def full_name(self, obj):
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
wagtailmodeladmin_register(PeopleAdmin)
<commit_msg>Add profile photos to list of people.<commit_after> | from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
| from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
# pylint: disable=no-self-use
def full_name(self, obj):
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
wagtailmodeladmin_register(PeopleAdmin)
Add profile photos to list of people.from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
| <commit_before>from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
# pylint: disable=no-self-use
def full_name(self, obj):
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
wagtailmodeladmin_register(PeopleAdmin)
<commit_msg>Add profile photos to list of people.<commit_after>from django.utils.html import format_html
from wagtailmodeladmin.options import ModelAdmin, wagtailmodeladmin_register
from .models import Person
class PeopleAdmin(ModelAdmin):
model = Person
menu_icon = 'group'
menu_label = 'Team'
menu_order = 300
list_display = ('profile_photo', 'full_name', 'production', 'role')
list_filter = ('role', 'production')
ordering = ('-production',)
search_fields = ('last_name', 'first_name', 'biography')
def full_name(self, obj): # pylint: disable=no-self-use
return "%s %s" % (
obj.first_name,
obj.last_name
)
full_name.short_description = 'name'
full_name.admin_order_field = 'last_name'
def profile_photo(self, obj):
return format_html(
'<img src="{}" title="{}" alt="{}" style="height:40px">',
obj.photo.file.url,
obj.photo,
"team member profile photo of " + self.full_name(obj)
)
profile_photo.allow_tags = True
profile_photo.short_description = 'photo'
wagtailmodeladmin_register(PeopleAdmin)
|
1492d79dc422cb37ddf2dd66558bd954a0e08bfb | download-records-s3.py | download-records-s3.py | #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
| #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
| Fix string format for week numbers < 10 | Fix string format for week numbers < 10
| Python | mit | gertvv/ictrp-retrieval,gertvv/ictrp-retrieval | #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
Fix string format for week numbers < 10 | #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
<commit_msg>Fix string format for week numbers < 10<commit_after> | #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
| #!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
Fix string format for week numbers < 10#!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
<commit_msg>Fix string format for week numbers < 10<commit_after>#!/usr/bin/python
import sys
import gzip
import datetime
import tempfile
import boto3
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('[%(levelname)s] %(asctime)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
import listRecords
import download
def main():
dataset = 'ictrp-raw-{}-w{:02d}.xml.gz'.format(*datetime.datetime.today().isocalendar())
idlist = listRecords.allList()
with tempfile.TemporaryFile() as tmpfile:
with gzip.GzipFile('raw.xml.gz', 'w', 9, tmpfile) as outfile:
failed = download.downloadRecords(idlist, outfile, True)
logger.info("Failed (all attempts): {}".format(str(failed)))
tmpfile.seek(0)
# write to s3
s3 = boto3.resource('s3')
object = s3.Bucket('ictrp-data').put_object(Key=dataset, Body=tmpfile)
object.Acl().put(ACL='public-read')
if __name__ == "__main__":
main()
|
116735d900b9ad92ae8ad265e5478f232e1474be | cartesius/colors.py | cartesius/colors.py | # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = temp / 256
green = temp % 256
temp = temp / 256
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
| # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
| Fix for integer division error: | Fix for integer division error:
$ python3 create_images_and_readme.py
Processing <function test_circles at 0x10cda6400>
written: graph-0-0.png
written: graph-0-1.png
Processing <function test_piechart_1 at 0x10d1d4a60>
written: graph-1-0.png
written: graph-1-1.png
Processing <function test_piechart_2 at 0x10d1ed1e0>
written: graph-2-0.png
written: graph-2-1.png
Processing <function test_barchart_1 at 0x10d1edb70>
written: graph-3-0.png
written: graph-3-1.png
Processing <function test_barchart_2 at 0x10d1edbf8>
written: graph-4-0.png
written: graph-4-1.png
Processing <function test_barchart_horizontal at 0x10d1edc80>
written: graph-5-0.png
written: graph-5-1.png
Processing <function test_barchart_with_generator at 0x10d1edd08>
written: graph-6-0.png
written: graph-6-1.png
Traceback (most recent call last):
File "create_images_and_readme.py", line 698, in <module>
images = function()
File "create_images_and_readme.py", line 216, in test_function
return coordinate_system.draw(300, 200), coordinate_system.draw(300, 200, antialiasing=True)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 246, in draw
self.__draw_elements(image=image, draw=draw, draw_handler=draw_handler, hide_x_axis=hide_x_axis, hide_y_axis=hide_y_axis)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 211, in __draw_elements
element.draw(image=image, draw=draw, draw_handler=draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 304, in draw
self.process_image(draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/charts.py", line 373, in process_image
draw_handler.draw_line(x1, y1, x2, y2, self.get_color_with_transparency(self.color))
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 397, in draw_line
self.pil_draw.line((image_x1, image_y1, image_x2, image_y2), color)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 177, in line
ink, fill = self._getink(fill)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 125, in _getink
ink = self.draw.draw_ink(ink, self.mode)
TypeError: integer argument expected, got float
| Python | apache-2.0 | tkrajina/cartesius,tkrajina/cartesius | # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = temp / 256
green = temp % 256
temp = temp / 256
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
Fix for integer division error:
$ python3 create_images_and_readme.py
Processing <function test_circles at 0x10cda6400>
written: graph-0-0.png
written: graph-0-1.png
Processing <function test_piechart_1 at 0x10d1d4a60>
written: graph-1-0.png
written: graph-1-1.png
Processing <function test_piechart_2 at 0x10d1ed1e0>
written: graph-2-0.png
written: graph-2-1.png
Processing <function test_barchart_1 at 0x10d1edb70>
written: graph-3-0.png
written: graph-3-1.png
Processing <function test_barchart_2 at 0x10d1edbf8>
written: graph-4-0.png
written: graph-4-1.png
Processing <function test_barchart_horizontal at 0x10d1edc80>
written: graph-5-0.png
written: graph-5-1.png
Processing <function test_barchart_with_generator at 0x10d1edd08>
written: graph-6-0.png
written: graph-6-1.png
Traceback (most recent call last):
File "create_images_and_readme.py", line 698, in <module>
images = function()
File "create_images_and_readme.py", line 216, in test_function
return coordinate_system.draw(300, 200), coordinate_system.draw(300, 200, antialiasing=True)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 246, in draw
self.__draw_elements(image=image, draw=draw, draw_handler=draw_handler, hide_x_axis=hide_x_axis, hide_y_axis=hide_y_axis)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 211, in __draw_elements
element.draw(image=image, draw=draw, draw_handler=draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 304, in draw
self.process_image(draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/charts.py", line 373, in process_image
draw_handler.draw_line(x1, y1, x2, y2, self.get_color_with_transparency(self.color))
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 397, in draw_line
self.pil_draw.line((image_x1, image_y1, image_x2, image_y2), color)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 177, in line
ink, fill = self._getink(fill)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 125, in _getink
ink = self.draw.draw_ink(ink, self.mode)
TypeError: integer argument expected, got float | # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
| <commit_before># -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = temp / 256
green = temp % 256
temp = temp / 256
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
<commit_msg>Fix for integer division error:
$ python3 create_images_and_readme.py
Processing <function test_circles at 0x10cda6400>
written: graph-0-0.png
written: graph-0-1.png
Processing <function test_piechart_1 at 0x10d1d4a60>
written: graph-1-0.png
written: graph-1-1.png
Processing <function test_piechart_2 at 0x10d1ed1e0>
written: graph-2-0.png
written: graph-2-1.png
Processing <function test_barchart_1 at 0x10d1edb70>
written: graph-3-0.png
written: graph-3-1.png
Processing <function test_barchart_2 at 0x10d1edbf8>
written: graph-4-0.png
written: graph-4-1.png
Processing <function test_barchart_horizontal at 0x10d1edc80>
written: graph-5-0.png
written: graph-5-1.png
Processing <function test_barchart_with_generator at 0x10d1edd08>
written: graph-6-0.png
written: graph-6-1.png
Traceback (most recent call last):
File "create_images_and_readme.py", line 698, in <module>
images = function()
File "create_images_and_readme.py", line 216, in test_function
return coordinate_system.draw(300, 200), coordinate_system.draw(300, 200, antialiasing=True)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 246, in draw
self.__draw_elements(image=image, draw=draw, draw_handler=draw_handler, hide_x_axis=hide_x_axis, hide_y_axis=hide_y_axis)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 211, in __draw_elements
element.draw(image=image, draw=draw, draw_handler=draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 304, in draw
self.process_image(draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/charts.py", line 373, in process_image
draw_handler.draw_line(x1, y1, x2, y2, self.get_color_with_transparency(self.color))
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 397, in draw_line
self.pil_draw.line((image_x1, image_y1, image_x2, image_y2), color)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 177, in line
ink, fill = self._getink(fill)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 125, in _getink
ink = self.draw.draw_ink(ink, self.mode)
TypeError: integer argument expected, got float<commit_after> | # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
| # -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = temp / 256
green = temp % 256
temp = temp / 256
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
Fix for integer division error:
$ python3 create_images_and_readme.py
Processing <function test_circles at 0x10cda6400>
written: graph-0-0.png
written: graph-0-1.png
Processing <function test_piechart_1 at 0x10d1d4a60>
written: graph-1-0.png
written: graph-1-1.png
Processing <function test_piechart_2 at 0x10d1ed1e0>
written: graph-2-0.png
written: graph-2-1.png
Processing <function test_barchart_1 at 0x10d1edb70>
written: graph-3-0.png
written: graph-3-1.png
Processing <function test_barchart_2 at 0x10d1edbf8>
written: graph-4-0.png
written: graph-4-1.png
Processing <function test_barchart_horizontal at 0x10d1edc80>
written: graph-5-0.png
written: graph-5-1.png
Processing <function test_barchart_with_generator at 0x10d1edd08>
written: graph-6-0.png
written: graph-6-1.png
Traceback (most recent call last):
File "create_images_and_readme.py", line 698, in <module>
images = function()
File "create_images_and_readme.py", line 216, in test_function
return coordinate_system.draw(300, 200), coordinate_system.draw(300, 200, antialiasing=True)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 246, in draw
self.__draw_elements(image=image, draw=draw, draw_handler=draw_handler, hide_x_axis=hide_x_axis, hide_y_axis=hide_y_axis)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 211, in __draw_elements
element.draw(image=image, draw=draw, draw_handler=draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 304, in draw
self.process_image(draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/charts.py", line 373, in process_image
draw_handler.draw_line(x1, y1, x2, y2, self.get_color_with_transparency(self.color))
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 397, in draw_line
self.pil_draw.line((image_x1, image_y1, image_x2, image_y2), color)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 177, in line
ink, fill = self._getink(fill)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 125, in _getink
ink = self.draw.draw_ink(ink, self.mode)
TypeError: integer argument expected, got float# -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
| <commit_before># -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = temp / 256
green = temp % 256
temp = temp / 256
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
<commit_msg>Fix for integer division error:
$ python3 create_images_and_readme.py
Processing <function test_circles at 0x10cda6400>
written: graph-0-0.png
written: graph-0-1.png
Processing <function test_piechart_1 at 0x10d1d4a60>
written: graph-1-0.png
written: graph-1-1.png
Processing <function test_piechart_2 at 0x10d1ed1e0>
written: graph-2-0.png
written: graph-2-1.png
Processing <function test_barchart_1 at 0x10d1edb70>
written: graph-3-0.png
written: graph-3-1.png
Processing <function test_barchart_2 at 0x10d1edbf8>
written: graph-4-0.png
written: graph-4-1.png
Processing <function test_barchart_horizontal at 0x10d1edc80>
written: graph-5-0.png
written: graph-5-1.png
Processing <function test_barchart_with_generator at 0x10d1edd08>
written: graph-6-0.png
written: graph-6-1.png
Traceback (most recent call last):
File "create_images_and_readme.py", line 698, in <module>
images = function()
File "create_images_and_readme.py", line 216, in test_function
return coordinate_system.draw(300, 200), coordinate_system.draw(300, 200, antialiasing=True)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 246, in draw
self.__draw_elements(image=image, draw=draw, draw_handler=draw_handler, hide_x_axis=hide_x_axis, hide_y_axis=hide_y_axis)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 211, in __draw_elements
element.draw(image=image, draw=draw, draw_handler=draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 304, in draw
self.process_image(draw_handler)
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/charts.py", line 373, in process_image
draw_handler.draw_line(x1, y1, x2, y2, self.get_color_with_transparency(self.color))
File "/Users/loos/Documents/schule/src/PycharmProjects/cartesius/cartesius/main.py", line 397, in draw_line
self.pil_draw.line((image_x1, image_y1, image_x2, image_y2), color)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 177, in line
ink, fill = self._getink(fill)
File "/usr/local/lib/python3.5/site-packages/PIL/ImageDraw.py", line 125, in _getink
ink = self.draw.draw_ink(ink, self.mode)
TypeError: integer argument expected, got float<commit_after># -*- coding: utf-8 -*-
""" Utility functions folr colors """
def get_color(color):
""" Can convert from integer to (r, g, b) """
if not color:
return None
if isinstance(color, int):
temp = color
blue = temp % 256
temp = int(temp / 256)
green = temp % 256
temp = int(temp / 256)
red = temp % 256
return (red, green, blue)
if not len(color) == 3:
raise Exception('Invalid color {0}'.format(color))
return color
def brighten(color, n):
return (int((color[0] + n) % 256), int((color[1] + n) % 256), int((color[2] + n) % 256))
def darken(color, n):
return brighten(color, -n)
def get_color_between(color1, color2, i):
""" i is a number between 0 and 1, if 0 then color1, if 1 color2, ... """
if i <= 0:
return color1
if i >= 1:
return color2
return (int(color1[0] + (color2[0] - color1[0]) * i),
int(color1[1] + (color2[1] - color1[1]) * i),
int(color1[2] + (color2[2] - color1[2]) * i))
|
06d6f8d70d09214dbdcff2af2637deeb025cd22e | radar/radar/validation/family_histories.py | radar/radar/validation/family_histories.py | from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
| from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
def pre_validate(self, obj):
if not obj.family_history:
obj.relatives = []
return obj
| Clear relatives if no family history | Clear relatives if no family history
| Python | agpl-3.0 | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
Clear relatives if no family history | from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
def pre_validate(self, obj):
if not obj.family_history:
obj.relatives = []
return obj
| <commit_before>from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
<commit_msg>Clear relatives if no family history<commit_after> | from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
def pre_validate(self, obj):
if not obj.family_history:
obj.relatives = []
return obj
| from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
Clear relatives if no family historyfrom radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
def pre_validate(self, obj):
if not obj.family_history:
obj.relatives = []
return obj
| <commit_before>from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
<commit_msg>Clear relatives if no family history<commit_after>from radar.validation.groups import CohortGroupValidationMixin
from radar.validation.core import Field, Validation
from radar.validation.meta import MetaValidationMixin
from radar.validation.patients import PatientValidationMixin
from radar.validation.validators import required, none_if_blank, optional, max_length, in_
from radar.validation.core import ListField
from radar.models.family_histories import RELATIONSHIPS
class FamilyHistoryRelativeValidation(Validation):
relationship = Field([required(), in_(RELATIONSHIPS.keys())])
patient = Field([optional()])
class FamilyHistoryValidation(PatientValidationMixin, CohortGroupValidationMixin, MetaValidationMixin, Validation):
parental_consanguinity = Field([required()])
family_history = Field([required()])
other_family_history = Field([none_if_blank(), optional(), max_length(10000)])
relatives = ListField(FamilyHistoryRelativeValidation())
def pre_validate(self, obj):
if not obj.family_history:
obj.relatives = []
return obj
|
e94b3ad6c393d6758d6bc5c5ca2b7e59febf710c | solidity/python/constants/PrintExpScalingFactors.py | solidity/python/constants/PrintExpScalingFactors.py | from common import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
| from math import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
| Revert previous commit on this specific file (mistake). | Revert previous commit on this specific file (mistake).
| Python | apache-2.0 | enjin/contracts | from common import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
Revert previous commit on this specific file (mistake). | from math import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
| <commit_before>from common import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
<commit_msg>Revert previous commit on this specific file (mistake).<commit_after> | from math import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
| from common import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
Revert previous commit on this specific file (mistake).from math import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
| <commit_before>from common import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
<commit_msg>Revert previous commit on this specific file (mistake).<commit_after>from math import exp
MIN_PRECISION = 32
for n in [0.5,1.0,2.0,3.0]:
print ' uint256 constant SCALED_EXP_{} = 0x{:x};'.format(n,int(exp(n)*(1<<MIN_PRECISION))).replace('.','P')
print ' uint256 constant SCALED_VAL_{} = 0x{:x};'.format(n,int( (n)*(1<<MIN_PRECISION))).replace('.','P')
|
dbe5e67d2685083769e7d154926e6a1a234fa3c4 | src/livestreamer/stream.py | src/livestreamer/stream.py | from livestreamer.utils import urlopen
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
| from livestreamer.utils import urlopen
import os
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
self.params["_err"] = open(os.devnull, "w")
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
| Fix rtmpdump locking up when stderr buffer is filled. | Fix rtmpdump locking up when stderr buffer is filled.
| Python | bsd-2-clause | charmander/livestreamer,sbstp/streamlink,wolftankk/livestreamer,bastimeyer/streamlink,caorong/livestreamer,breunigs/livestreamer,lyhiving/livestreamer,breunigs/livestreamer,okaywit/livestreamer,derrod/livestreamer,fishscene/streamlink,back-to/streamlink,streamlink/streamlink,wlerin/streamlink,melmorabity/streamlink,sbstp/streamlink,Dobatymo/livestreamer,streamlink/streamlink,back-to/streamlink,intact/livestreamer,Masaz-/livestreamer,ethanhlc/streamlink,charmander/livestreamer,blxd/livestreamer,programming086/livestreamer,blxd/livestreamer,derrod/livestreamer,caorong/livestreamer,flijloku/livestreamer,mmetak/streamlink,javiercantero/streamlink,lyhiving/livestreamer,melmorabity/streamlink,chhe/streamlink,Klaudit/livestreamer,Feverqwe/livestreamer,chhe/livestreamer,beardypig/streamlink,beardypig/streamlink,gravyboat/streamlink,Feverqwe/livestreamer,chhe/livestreamer,chrisnicholls/livestreamer,wlerin/streamlink,intact/livestreamer,bastimeyer/streamlink,wolftankk/livestreamer,chrippa/livestreamer,Saturn/livestreamer,chhe/streamlink,Dobatymo/livestreamer,programming086/livestreamer,gtmanfred/livestreamer,chrippa/livestreamer,asermax/livestreamer,mmetak/streamlink,hmit/livestreamer,gravyboat/streamlink,javiercantero/streamlink,Saturn/livestreamer,Klaudit/livestreamer,hmit/livestreamer,fishscene/streamlink,gtmanfred/livestreamer,jtsymon/livestreamer,ethanhlc/streamlink,Masaz-/livestreamer,okaywit/livestreamer,jtsymon/livestreamer,flijloku/livestreamer | from livestreamer.utils import urlopen
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
Fix rtmpdump locking up when stderr buffer is filled. | from livestreamer.utils import urlopen
import os
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
self.params["_err"] = open(os.devnull, "w")
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
| <commit_before>from livestreamer.utils import urlopen
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
<commit_msg>Fix rtmpdump locking up when stderr buffer is filled.<commit_after> | from livestreamer.utils import urlopen
import os
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
self.params["_err"] = open(os.devnull, "w")
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
| from livestreamer.utils import urlopen
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
Fix rtmpdump locking up when stderr buffer is filled.from livestreamer.utils import urlopen
import os
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
self.params["_err"] = open(os.devnull, "w")
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
| <commit_before>from livestreamer.utils import urlopen
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
<commit_msg>Fix rtmpdump locking up when stderr buffer is filled.<commit_after>from livestreamer.utils import urlopen
import os
import pbs
class StreamError(Exception):
pass
class Stream(object):
def open(self):
raise NotImplementedError
class RTMPStream(Stream):
def __init__(self, params):
self.params = params or {}
def open(self):
try:
rtmpdump = pbs.rtmpdump
except pbs.CommandNotFound:
raise StreamError("Unable to find 'rtmpdump' command")
self.params["flv"] = "-"
self.params["_bg"] = True
self.params["_err"] = open(os.devnull, "w")
stream = rtmpdump(**self.params)
return stream.process.stdout
class HTTPStream(Stream):
def __init__(self, url):
self.url = url
def open(self):
return urlopen(self.url)
|
cf22defd6a705cab79bc24b82377e23b9d798dbf | nopassword/backends/base.py | nopassword/backends/base.py | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = datetime.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
| # -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.utils import timezone
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
| Use timezone.now() instead of datetime.now() | fix: Use timezone.now() instead of datetime.now()
| Python | mit | relekang/django-nopassword,relekang/django-nopassword | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = datetime.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
fix: Use timezone.now() instead of datetime.now() | # -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.utils import timezone
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
| <commit_before># -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = datetime.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
<commit_msg>fix: Use timezone.now() instead of datetime.now()<commit_after> | # -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.utils import timezone
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
| # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = datetime.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
fix: Use timezone.now() instead of datetime.now()# -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.utils import timezone
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
| <commit_before># -*- coding: utf-8 -*-
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = datetime.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
<commit_msg>fix: Use timezone.now() instead of datetime.now()<commit_after># -*- coding: utf-8 -*-
from datetime import timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.backends import ModelBackend
from django.utils import timezone
from nopassword.models import LoginCode
class NoPasswordBackend(ModelBackend):
def authenticate(self, request, username=None, code=None, **kwargs):
if username is None:
username = kwargs.get(get_user_model().USERNAME_FIELD)
if not username or not code:
return
try:
user = get_user_model()._default_manager.get_by_natural_key(username)
if not self.user_can_authenticate(user):
return
timeout = getattr(settings, 'NOPASSWORD_LOGIN_CODE_TIMEOUT', 900)
timestamp = timezone.now() - timedelta(seconds=timeout)
# We don't delete the login code when authenticating,
# as that is done during validation of the login form
# and validation should not have any side effects.
# It is the responsibility of the view/form to delete the token
# as soon as the login was successfull.
user.login_code = LoginCode.objects.get(user=user, code=code, timestamp__gt=timestamp)
return user
except (get_user_model().DoesNotExist, LoginCode.DoesNotExist):
return
def send_login_code(self, code, context, **kwargs):
raise NotImplementedError
|
a5befe542e857ec36717f7f8da53ff9f2c2af7e6 | natasha/__init__.py | natasha/__init__.py | from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo
class Combinator(object):
DEFAULT_GRAMMARS = [
Person,
Geo,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
| from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo, Money, Date
class Combinator(object):
DEFAULT_GRAMMARS = [
Money,
Person,
Geo,
Date,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
| Add new grammars to Combinator.DEFAULT_GRAMMARS | Add new grammars to Combinator.DEFAULT_GRAMMARS
| Python | mit | natasha/natasha | from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo
class Combinator(object):
DEFAULT_GRAMMARS = [
Person,
Geo,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
Add new grammars to Combinator.DEFAULT_GRAMMARS | from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo, Money, Date
class Combinator(object):
DEFAULT_GRAMMARS = [
Money,
Person,
Geo,
Date,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
| <commit_before>from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo
class Combinator(object):
DEFAULT_GRAMMARS = [
Person,
Geo,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
<commit_msg>Add new grammars to Combinator.DEFAULT_GRAMMARS<commit_after> | from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo, Money, Date
class Combinator(object):
DEFAULT_GRAMMARS = [
Money,
Person,
Geo,
Date,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
| from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo
class Combinator(object):
DEFAULT_GRAMMARS = [
Person,
Geo,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
Add new grammars to Combinator.DEFAULT_GRAMMARSfrom copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo, Money, Date
class Combinator(object):
DEFAULT_GRAMMARS = [
Money,
Person,
Geo,
Date,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
| <commit_before>from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo
class Combinator(object):
DEFAULT_GRAMMARS = [
Person,
Geo,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
<commit_msg>Add new grammars to Combinator.DEFAULT_GRAMMARS<commit_after>from copy import copy
from collections import deque
from yargy import FactParser
from natasha.grammars import Person, Geo, Money, Date
class Combinator(object):
DEFAULT_GRAMMARS = [
Money,
Person,
Geo,
Date,
]
def __init__(self, grammars=None, cache_size=50000):
self.grammars = grammars or self.DEFAULT_GRAMMARS
self.parser = FactParser(cache_size=cache_size)
def extract(self, text):
tokens = deque(self.parser.tokenizer.transform(text))
for grammar in self.grammars:
for grammar_type, rule in grammar.__members__.items():
for match in self.parser.extract(copy(tokens), rule.value):
yield (grammar, grammar_type, match)
|
e382566a5a74a0eb2545456eb966cac569d5ebb4 | pylatex/numpy.py | pylatex/numpy.py | # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', argument=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
| # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', arguments=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
| Fix bug in VectorName, caused by new Command arguments | Fix bug in VectorName, caused by new Command arguments
| Python | mit | JelteF/PyLaTeX,ovaskevich/PyLaTeX,bjodah/PyLaTeX,sebastianhaas/PyLaTeX,JelteF/PyLaTeX,votti/PyLaTeX,bjodah/PyLaTeX,jendas1/PyLaTeX,jendas1/PyLaTeX,sebastianhaas/PyLaTeX,votti/PyLaTeX,ovaskevich/PyLaTeX | # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', argument=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
Fix bug in VectorName, caused by new Command arguments | # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', arguments=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
| <commit_before># -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', argument=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
<commit_msg>Fix bug in VectorName, caused by new Command arguments<commit_after> | # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', arguments=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
| # -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', argument=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
Fix bug in VectorName, caused by new Command arguments# -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', arguments=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
| <commit_before># -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', argument=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
<commit_msg>Fix bug in VectorName, caused by new Command arguments<commit_after># -*- coding: utf-8 -*-
"""
pylatex.numpy
~~~~~~~~~~~~~
This module implements the classes that deals with numpy objects.
:copyright: (c) 2014 by Jelte Fennema.
:license: MIT, see License for more details.
"""
import numpy as np
from pylatex.base_classes import BaseLaTeXClass
from pylatex.package import Package
from pylatex.command import Command
class VectorName(Command):
def __init__(self, name):
super().__init__('mathbf', arguments=name)
class Matrix(BaseLaTeXClass):
def __init__(self, matrix, name='', mtype='p', alignment=None):
self.mtype = mtype
self.matrix = matrix
self.alignment = alignment
self.name = name
super().__init__(packages=[Package('amsmath')])
def dumps(self):
string = r'\begin{'
mtype = self.mtype + 'matrix'
if self.alignment is not None:
mtype += '*'
alignment = '{' + self.alignment + '}'
else:
alignment = ''
string += mtype + '}' + alignment
string += '\n'
shape = self.matrix.shape
for (y, x), value in np.ndenumerate(self.matrix):
if x:
string += '&'
string += str(value)
if x == shape[1] - 1 and y != shape[0] - 1:
string += r'\\' + '\n'
string += '\n'
string += r'\end{' + mtype + '}'
super().dumps()
return string
|
95bac0b68e271d7ad5a4f8fa22c441d18a65390c | client/serial_datagrams.py | client/serial_datagrams.py | import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
data = data + struct.pack('>I', crc32(data))
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
| import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
# to generate same numeric value for all python versions
crc = crc32(data) & 0xffffffff
data = data + struct.pack('>I', crc)
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
| Correct crc32 behaviour for all python versions. | Correct crc32 behaviour for all python versions.
This fixes signedness issue with crc32 from different python versions.
Was suggested in the documentation: https://docs.python.org/3.4/library/zlib.html
| Python | bsd-2-clause | cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader | import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
data = data + struct.pack('>I', crc32(data))
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
Correct crc32 behaviour for all python versions.
This fixes signedness issue with crc32 from different python versions.
Was suggested in the documentation: https://docs.python.org/3.4/library/zlib.html | import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
# to generate same numeric value for all python versions
crc = crc32(data) & 0xffffffff
data = data + struct.pack('>I', crc)
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
| <commit_before>import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
data = data + struct.pack('>I', crc32(data))
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
<commit_msg>Correct crc32 behaviour for all python versions.
This fixes signedness issue with crc32 from different python versions.
Was suggested in the documentation: https://docs.python.org/3.4/library/zlib.html<commit_after> | import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
# to generate same numeric value for all python versions
crc = crc32(data) & 0xffffffff
data = data + struct.pack('>I', crc)
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
| import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
data = data + struct.pack('>I', crc32(data))
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
Correct crc32 behaviour for all python versions.
This fixes signedness issue with crc32 from different python versions.
Was suggested in the documentation: https://docs.python.org/3.4/library/zlib.htmlimport struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
# to generate same numeric value for all python versions
crc = crc32(data) & 0xffffffff
data = data + struct.pack('>I', crc)
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
| <commit_before>import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
data = data + struct.pack('>I', crc32(data))
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
<commit_msg>Correct crc32 behaviour for all python versions.
This fixes signedness issue with crc32 from different python versions.
Was suggested in the documentation: https://docs.python.org/3.4/library/zlib.html<commit_after>import struct
from zlib import crc32
class CRCMismatchError(RuntimeError):
"""
Error raised when a datagram has an invalid CRC.
"""
pass
class FrameError(RuntimeError):
"""
Error raised when a datagram is too short to be valid.
"""
END = b'\xC0'
ESC = b'\xDB'
ESC_END = b'\xDC'
ESC_ESC = b'\xDD'
def datagram_encode(data):
"""
Encodes the given datagram (bytes object) by adding a CRC at the end then an end marker.
It also escapes the end marker correctly.
"""
# to generate same numeric value for all python versions
crc = crc32(data) & 0xffffffff
data = data + struct.pack('>I', crc)
data = data.replace(ESC, ESC + ESC_ESC)
data = data.replace(END, ESC + ESC_END)
return data + END
def datagram_decode(data):
"""
Decodes a datagram. Exact inverse of datagram_encode()
"""
# Checks if the data is at least long enough for the CRC and the END marker
if len(data) < 5:
raise FrameError
data = data[:-1] # remote end marker
data = data.replace(ESC + ESC_END, END)
data = data.replace(ESC + ESC_ESC, ESC)
expected_crc = struct.unpack('>I', data[-4:])[0]
actual_crc = crc32(data[:-4])
if expected_crc != actual_crc:
raise CRCMismatchError
return data[:-4]
|
31c0863d088488da5dd85e2cbe3c01c6b01aa4a2 | system_tests/test_default.py | system_tests/test_default.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
| # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| Fix system tests when running on GCE | Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.
| Python | apache-2.0 | googleapis/google-auth-library-python,googleapis/google-auth-library-python | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations. | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| <commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
<commit_msg>Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.<commit_after> | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
| <commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
else:
assert project_id is None
verify_refresh(credentials)
<commit_msg>Fix system tests when running on GCE
The new project ID logic for Cloud SDK invokes Cloud SDK directly. Cloud SDK helpfully falls back to the GCE project ID if the project ID is unset in the configuration. This breaks one of our previous expectations.<commit_after># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import google.auth
EXPECT_PROJECT_ID = os.environ.get('EXPECT_PROJECT_ID')
def test_application_default_credentials(verify_refresh):
credentials, project_id = google.auth.default()
if EXPECT_PROJECT_ID is not None:
assert project_id is not None
verify_refresh(credentials)
|
f042f6c9799d70edb41ae9495adf8bb78ed23e13 | elections/ar_elections_2015/settings.py | elections/ar_elections_2015/settings.py | # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
| # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015|parlamentarios-mercosur-regional-paso-2015|parlamentarios-mercosur-unico-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
| Add some missing election slugs to Argentina's ELECTION_RE | AR: Add some missing election slugs to Argentina's ELECTION_RE
| Python | agpl-3.0 | mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextmp-popit,datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit | # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
AR: Add some missing election slugs to Argentina's ELECTION_RE | # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015|parlamentarios-mercosur-regional-paso-2015|parlamentarios-mercosur-unico-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
| <commit_before># -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
<commit_msg>AR: Add some missing election slugs to Argentina's ELECTION_RE<commit_after> | # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015|parlamentarios-mercosur-regional-paso-2015|parlamentarios-mercosur-unico-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
| # -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
AR: Add some missing election slugs to Argentina's ELECTION_RE# -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015|parlamentarios-mercosur-regional-paso-2015|parlamentarios-mercosur-unico-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
| <commit_before># -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
<commit_msg>AR: Add some missing election slugs to Argentina's ELECTION_RE<commit_after># -*- coding: utf-8 -*-
ELECTION_RE = '(?P<election>diputados-argentina-paso-2015|gobernadores-argentina-paso-2015|senadores-argentina-paso-2015|presidentes-argentina-paso-2015|parlamentarios-mercosur-regional-paso-2015|parlamentarios-mercosur-unico-paso-2015)'
MAPIT_BASE_URL = 'http://argentina.mapit.staging.mysociety.org/'
SITE_OWNER = 'YoQuieroSaber'
COPYRIGHT_HOLDER = 'YoQuieroSaber'
|
cf6c18925c05a6f009573c204631a09ed1957227 | mama_cas/utils.py | mama_cas/utils.py | import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
existing parameters already exist with the same name, they
will be overwritten.
Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
| import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
parameters already exist with the same name, they will be
overwritten. Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
| Tweak comment text for clarity | Tweak comment text for clarity
| Python | bsd-3-clause | harlov/django-mama-cas,harlov/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,jbittel/django-mama-cas,orbitvu/django-mama-cas,forcityplatform/django-mama-cas,forcityplatform/django-mama-cas | import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
existing parameters already exist with the same name, they
will be overwritten.
Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
Tweak comment text for clarity | import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
parameters already exist with the same name, they will be
overwritten. Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
| <commit_before>import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
existing parameters already exist with the same name, they
will be overwritten.
Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
<commit_msg>Tweak comment text for clarity<commit_after> | import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
parameters already exist with the same name, they will be
overwritten. Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
| import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
existing parameters already exist with the same name, they
will be overwritten.
Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
Tweak comment text for clarityimport urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
parameters already exist with the same name, they will be
overwritten. Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
| <commit_before>import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
existing parameters already exist with the same name, they
will be overwritten.
Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
<commit_msg>Tweak comment text for clarity<commit_after>import urllib
import urlparse
def add_query_params(url, params):
"""
Inject additional query parameters into an existing URL. If
parameters already exist with the same name, they will be
overwritten. Return the modified URL as a string.
"""
# If any of the additional parameters have empty values,
# ignore them
params = dict([(k, v) for k, v in params.items() if v])
parts = list(urlparse.urlparse(url))
query = dict(urlparse.parse_qsl(parts[4]))
query.update(params)
parts[4] = urllib.urlencode(query)
url = urlparse.urlunparse(parts)
return url
def is_scheme_https(url):
"""
Test the scheme of the parameter URL to see if it is HTTPS. If
it is HTTPS return True, otherwise return False.
"""
return 'https' == urlparse.urlparse(url).scheme
|
65504fbb13a61e36f1fe0ea9fcc790efdf7d8fd6 | resolwe/__about__.py | resolwe/__about__.py | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| Change author in about file | Change author in about file
| Python | apache-2.0 | genialis/resolwe,jberci/resolwe,jberci/resolwe,genialis/resolwe | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
Change author in about file | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| <commit_before>"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
<commit_msg>Change author in about file<commit_after> | """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| """Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
Change author in about file"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
| <commit_before>"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis d.o.o.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
<commit_msg>Change author in about file<commit_after>"""Central place for package metadata."""
# NOTE: We use __title__ instead of simply __name__ since the latter would
# interfere with a global variable __name__ denoting object's name.
__title__ = 'resolwe'
__summary__ = 'Open source enterprise dataflow engine in Django'
__url__ = 'https://github.com/genialis/resolwe'
# Semantic versioning is used. For more information see:
# https://packaging.python.org/en/latest/distributing/#semantic-versioning-preferred
__version__ = '10.2.0a2'
__author__ = 'Genialis, Inc.'
__email__ = 'dev-team@genialis.com'
__license__ = 'Apache License (2.0)'
__copyright__ = '2015-2018, ' + __author__
__all__ = (
'__title__', '__summary__', '__url__', '__version__', '__author__',
'__email__', '__license__', '__copyright__',
)
|
81235c83c094e3dbca62f19d13c83a002d01da99 | bokeh/mixins.py | bokeh/mixins.py | """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("10pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("black")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline)
| """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("12pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("#444444")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline, default="bottom")
| Unify bokeh's text properties' values with bokehjs | Unify bokeh's text properties' values with bokehjs
| Python | bsd-3-clause | eteq/bokeh,jakirkham/bokeh,Karel-van-de-Plassche/bokeh,caseyclements/bokeh,bokeh/bokeh,xguse/bokeh,timothydmorton/bokeh,azjps/bokeh,ptitjano/bokeh,timothydmorton/bokeh,canavandl/bokeh,alan-unravel/bokeh,ahmadia/bokeh,eteq/bokeh,stuart-knock/bokeh,CrazyGuo/bokeh,ChristosChristofidis/bokeh,percyfal/bokeh,aavanian/bokeh,laurent-george/bokeh,rhiever/bokeh,stonebig/bokeh,ChinaQuants/bokeh,rothnic/bokeh,KasperPRasmussen/bokeh,rs2/bokeh,abele/bokeh,ptitjano/bokeh,srinathv/bokeh,roxyboy/bokeh,birdsarah/bokeh,saifrahmed/bokeh,maxalbert/bokeh,tacaswell/bokeh,birdsarah/bokeh,rothnic/bokeh,srinathv/bokeh,roxyboy/bokeh,jplourenco/bokeh,htygithub/bokeh,ChristosChristofidis/bokeh,akloster/bokeh,akloster/bokeh,deeplook/bokeh,azjps/bokeh,laurent-george/bokeh,daodaoliang/bokeh,xguse/bokeh,bsipocz/bokeh,phobson/bokeh,jakirkham/bokeh,schoolie/bokeh,mindriot101/bokeh,PythonCharmers/bokeh,saifrahmed/bokeh,aavanian/bokeh,lukebarnard1/bokeh,gpfreitas/bokeh,rs2/bokeh,khkaminska/bokeh,satishgoda/bokeh,draperjames/bokeh,jakirkham/bokeh,ericdill/bokeh,muku42/bokeh,satishgoda/bokeh,josherick/bokeh,deeplook/bokeh,stonebig/bokeh,bokeh/bokeh,justacec/bokeh,CrazyGuo/bokeh,maxalbert/bokeh,azjps/bokeh,schoolie/bokeh,stonebig/bokeh,justacec/bokeh,muku42/bokeh,almarklein/bokeh,rhiever/bokeh,DuCorey/bokeh,phobson/bokeh,rhiever/bokeh,roxyboy/bokeh,dennisobrien/bokeh,aiguofer/bokeh,DuCorey/bokeh,rhiever/bokeh,ericdill/bokeh,rs2/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,awanke/bokeh,laurent-george/bokeh,Karel-van-de-Plassche/bokeh,paultcochrane/bokeh,PythonCharmers/bokeh,paultcochrane/bokeh,KasperPRasmussen/bokeh,alan-unravel/bokeh,carlvlewis/bokeh,gpfreitas/bokeh,srinathv/bokeh,aiguofer/bokeh,laurent-george/bokeh,Karel-van-de-Plassche/bokeh,ChinaQuants/bokeh,timsnyder/bokeh,jakirkham/bokeh,timsnyder/bokeh,dennisobrien/bokeh,caseyclements/bokeh,gpfreitas/bokeh,carlvlewis/bokeh,saifrahmed/bokeh,aiguofer/bokeh,caseyclements/bokeh,CrazyGuo/bokeh,jakirkham/bokeh,ericmjl/bokeh,ptitjan
o/bokeh,abele/bokeh,maxalbert/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,draperjames/bokeh,Karel-van-de-Plassche/bokeh,percyfal/bokeh,deeplook/bokeh,timothydmorton/bokeh,aiguofer/bokeh,mutirri/bokeh,almarklein/bokeh,PythonCharmers/bokeh,azjps/bokeh,draperjames/bokeh,KasperPRasmussen/bokeh,azjps/bokeh,srinathv/bokeh,carlvlewis/bokeh,matbra/bokeh,quasiben/bokeh,timothydmorton/bokeh,mindriot101/bokeh,justacec/bokeh,rs2/bokeh,eteq/bokeh,ericdill/bokeh,xguse/bokeh,birdsarah/bokeh,aavanian/bokeh,awanke/bokeh,evidation-health/bokeh,ChristosChristofidis/bokeh,jplourenco/bokeh,gpfreitas/bokeh,ericmjl/bokeh,ptitjano/bokeh,josherick/bokeh,evidation-health/bokeh,schoolie/bokeh,quasiben/bokeh,khkaminska/bokeh,clairetang6/bokeh,lukebarnard1/bokeh,bokeh/bokeh,bsipocz/bokeh,tacaswell/bokeh,msarahan/bokeh,maxalbert/bokeh,ahmadia/bokeh,caseyclements/bokeh,clairetang6/bokeh,eteq/bokeh,akloster/bokeh,timsnyder/bokeh,PythonCharmers/bokeh,satishgoda/bokeh,aiguofer/bokeh,bsipocz/bokeh,ahmadia/bokeh,mutirri/bokeh,clairetang6/bokeh,dennisobrien/bokeh,rothnic/bokeh,CrazyGuo/bokeh,alan-unravel/bokeh,aavanian/bokeh,muku42/bokeh,dennisobrien/bokeh,quasiben/bokeh,akloster/bokeh,bokeh/bokeh,philippjfr/bokeh,paultcochrane/bokeh,rs2/bokeh,philippjfr/bokeh,DuCorey/bokeh,htygithub/bokeh,philippjfr/bokeh,xguse/bokeh,abele/bokeh,almarklein/bokeh,ChinaQuants/bokeh,phobson/bokeh,matbra/bokeh,daodaoliang/bokeh,ericmjl/bokeh,percyfal/bokeh,msarahan/bokeh,ericmjl/bokeh,phobson/bokeh,ericdill/bokeh,DuCorey/bokeh,htygithub/bokeh,muku42/bokeh,awanke/bokeh,KasperPRasmussen/bokeh,khkaminska/bokeh,ChinaQuants/bokeh,josherick/bokeh,lukebarnard1/bokeh,timsnyder/bokeh,canavandl/bokeh,canavandl/bokeh,mindriot101/bokeh,stonebig/bokeh,ptitjano/bokeh,josherick/bokeh,canavandl/bokeh,ChristosChristofidis/bokeh,schoolie/bokeh,percyfal/bokeh,timsnyder/bokeh,mutirri/bokeh,philippjfr/bokeh,evidation-health/bokeh,alan-unravel/bokeh,ahmadia/bokeh,abele/bokeh,paultcochrane/bokeh,msarahan/bokeh,tacaswell/bokeh,draperjames/
bokeh,khkaminska/bokeh,deeplook/bokeh,rothnic/bokeh,justacec/bokeh,saifrahmed/bokeh,stuart-knock/bokeh,philippjfr/bokeh,mindriot101/bokeh,phobson/bokeh,lukebarnard1/bokeh,stuart-knock/bokeh,daodaoliang/bokeh,percyfal/bokeh,draperjames/bokeh,stuart-knock/bokeh,carlvlewis/bokeh,birdsarah/bokeh,schoolie/bokeh,msarahan/bokeh,tacaswell/bokeh,ericmjl/bokeh,daodaoliang/bokeh,clairetang6/bokeh,dennisobrien/bokeh,awanke/bokeh,matbra/bokeh,aavanian/bokeh,bsipocz/bokeh,jplourenco/bokeh,jplourenco/bokeh,mutirri/bokeh,roxyboy/bokeh,matbra/bokeh,bokeh/bokeh,evidation-health/bokeh,satishgoda/bokeh | """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("10pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("black")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline)
Unify bokeh's text properties' values with bokehjs | """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("12pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("#444444")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline, default="bottom")
| <commit_before>"""Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("10pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("black")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline)
<commit_msg>Unify bokeh's text properties' values with bokehjs<commit_after> | """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("12pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("#444444")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline, default="bottom")
| """Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("10pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("black")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline)
Unify bokeh's text properties' values with bokehjs"""Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("12pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("#444444")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline, default="bottom")
| <commit_before>"""Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("10pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("black")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline)
<commit_msg>Unify bokeh's text properties' values with bokehjs<commit_after>"""Classes that can be mixed-in to HasProps classes to get them the corresponding attributes. """
from .properties import HasProps, ColorSpec, DataSpec, Enum, DashPattern, Int, String
from .enums import LineJoin, LineCap, FontStyle, TextAlign, TextBaseline
class FillProps(HasProps):
""" Mirrors the BokehJS properties.fill_properties class """
fill_color = ColorSpec("gray")
fill_alpha = DataSpec(1.0)
class LineProps(HasProps):
""" Mirrors the BokehJS properties.line_properties class """
line_color = ColorSpec("black")
line_width = DataSpec
line_alpha = DataSpec(1.0)
line_join = Enum(LineJoin)
line_cap = Enum(LineCap)
line_dash = DashPattern
line_dash_offset = Int(0)
class TextProps(HasProps):
""" Mirrors the BokehJS properties.text_properties class """
text_font = String("Helvetica")
text_font_size = String("12pt")
text_font_style = Enum(FontStyle)
text_color = ColorSpec("#444444")
text_alpha = DataSpec(1.0)
text_align = Enum(TextAlign)
text_baseline = Enum(TextBaseline, default="bottom")
|
e081646028e4f3283fb9c7278fed89c3e42cc4d3 | server/tests/api/test_user_api.py | server/tests/api/test_user_api.py | import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response = self.test_client.get('/api/users')
assert response.status_code is 200
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
| import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 0
assert response.status_code is 200
@fixtures('single_user.json')
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 1
assert response.status_code is 200
@fixtures('many_users.json')
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
@fixtures('many_users.json')
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
@fixtures('many_users.json')
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
| Implement tests for getting empty users list and single user list | Implement tests for getting empty users list and single user list
| Python | mit | ganemone/ontheside,ganemone/ontheside,ganemone/ontheside | import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response = self.test_client.get('/api/users')
assert response.status_code is 200
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
Implement tests for getting empty users list and single user list | import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 0
assert response.status_code is 200
@fixtures('single_user.json')
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 1
assert response.status_code is 200
@fixtures('many_users.json')
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
@fixtures('many_users.json')
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
@fixtures('many_users.json')
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
| <commit_before>import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response = self.test_client.get('/api/users')
assert response.status_code is 200
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
<commit_msg>Implement tests for getting empty users list and single user list<commit_after> | import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 0
assert response.status_code is 200
@fixtures('single_user.json')
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 1
assert response.status_code is 200
@fixtures('many_users.json')
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
@fixtures('many_users.json')
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
@fixtures('many_users.json')
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
| import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response = self.test_client.get('/api/users')
assert response.status_code is 200
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
Implement tests for getting empty users list and single user listimport json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 0
assert response.status_code is 200
@fixtures('single_user.json')
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 1
assert response.status_code is 200
@fixtures('many_users.json')
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
@fixtures('many_users.json')
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
@fixtures('many_users.json')
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
| <commit_before>import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response = self.test_client.get('/api/users')
assert response.status_code is 200
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
<commit_msg>Implement tests for getting empty users list and single user list<commit_after>import json
from tests.helpers import FlaskTestCase, fixtures
class TestUserAPI(FlaskTestCase):
@fixtures('base.json')
def test_get_empty_users(self):
"""Test GET /api/users endpoint with no data"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 0
assert response.status_code is 200
@fixtures('single_user.json')
def test_get_one_user(self):
"""Test GET /api/users endpoint with a single user"""
response, data = self.api_request('get', '/api/users')
assert data['num_results'] is 1
assert response.status_code is 200
@fixtures('many_users.json')
def test_get_multiple_users(self):
"""Test GET /api/users endpoint with multple users"""
@fixtures('many_users.json')
def test_get_no_user_by_id(self):
"""Test GET /api/users/(int:id) for missing user"""
@fixtures('many_users.json')
def test_user_by_id(self):
"""Test GET /api/users(int:id) for existing user"""
@fixtures('base.json')
def test_post_user(self):
data = {
'name': 'Giancarlo Anemone',
'username': 'ganemone',
'email': 'ganemone@gmail.com',
'password': 'password',
'confirm': 'password'
}
response = self.app.post(
'/api/users',
data=json.dumps(data)
)
assert response.status_code is 201
|
962f26299a7038879eb1efeb8f16b0801fd9a04a | glitter/assets/apps.py | glitter/assets/apps.py | # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
| # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
verbose_name = 'Assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
| Improve verbose name for assets | Improve verbose name for assets
| Python | bsd-3-clause | blancltd/django-glitter,blancltd/django-glitter,developersociety/django-glitter,blancltd/django-glitter,developersociety/django-glitter,developersociety/django-glitter | # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
Improve verbose name for assets | # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
verbose_name = 'Assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
| <commit_before># -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
<commit_msg>Improve verbose name for assets<commit_after> | # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
verbose_name = 'Assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
| # -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
Improve verbose name for assets# -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
verbose_name = 'Assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
| <commit_before># -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
<commit_msg>Improve verbose name for assets<commit_after># -*- coding: utf-8 -*-
from django.apps import AppConfig
class GlitterBasicAssetsConfig(AppConfig):
name = 'glitter.assets'
label = 'glitter_assets'
verbose_name = 'Assets'
def ready(self):
super(GlitterBasicAssetsConfig, self).ready()
from . import listeners # noqa
|
5dfc8a9b759e2106cfa33d13f736b27fddee9079 | influxdb_metrics/loader.py | influxdb_metrics/loader.py | """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except Exception:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
| """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except ImportError:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
| Fix to not silently swallow all Exceptions. | Fix to not silently swallow all Exceptions.
Great lib, thanks! | Python | mit | bitmazk/django-influxdb-metrics,bitlabstudio/django-influxdb-metrics,bitlabstudio/django-influxdb-metrics,bitmazk/django-influxdb-metrics | """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except Exception:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
Fix to not silently swallow all Exceptions.
Great lib, thanks! | """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except ImportError:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
| <commit_before>"""Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except Exception:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
<commit_msg>Fix to not silently swallow all Exceptions.
Great lib, thanks!<commit_after> | """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except ImportError:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
| """Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except Exception:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
Fix to not silently swallow all Exceptions.
Great lib, thanks!"""Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except ImportError:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
| <commit_before>"""Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except Exception:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
<commit_msg>Fix to not silently swallow all Exceptions.
Great lib, thanks!<commit_after>"""Loads celery or non-celery version."""
from django.conf import settings
try:
from .tasks import write_points as write_points_celery
except ImportError:
write_points_celery = None
from .utils import write_points as write_points_normal
write_points = None
if getattr(settings, 'INFLUXDB_USE_CELERY', False):
write_points = write_points_celery.delay
else:
write_points = write_points_normal
|
28f504dccd02046604761e997f929015a285dffd | pyQuantuccia/tests/test_get_holiday_date.py | pyQuantuccia/tests/test_get_holiday_date.py | from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
| from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
| Add some bogus tests to try and get this info. | Add some bogus tests to try and get this info.
| Python | bsd-3-clause | jwg4/pyQuantuccia,jwg4/pyQuantuccia | from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
Add some bogus tests to try and get this info. | from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
| <commit_before>from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
<commit_msg>Add some bogus tests to try and get this info.<commit_after> | from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
| from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
Add some bogus tests to try and get this info.from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
| <commit_before>from datetime import date
import calendar
print(calendar.__dir__())
print(calendar.__dict__)
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
<commit_msg>Add some bogus tests to try and get this info.<commit_after>from datetime import date
import calendar
def test_foo():
assert(calendar.__dir__() == "")
def test_dummy():
assert(calendar.__dict__ == "")
def test_united_kingdom_is_business_day():
""" Check a single day to see that we
can identify holidays.
"""
assert(calendar.united_kingdom_is_business_day(date(2017, 4, 17)) is False)
|
fea4f04abc18b8dcf4970a1f338a8d610f04260d | src/pytz/tests/test_docs.py | src/pytz/tests/test_docs.py | #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
| #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
| Add a test_suite method as used by the Zope3 test runner | Add a test_suite method as used by the Zope3 test runner | Python | mit | stub42/pytz,stub42/pytz,stub42/pytz,stub42/pytz | #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
Add a test_suite method as used by the Zope3 test runner | #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
| <commit_before>#!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
<commit_msg>Add a test_suite method as used by the Zope3 test runner<commit_after> | #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
| #!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
Add a test_suite method as used by the Zope3 test runner#!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
| <commit_before>#!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
if __name__ == '__main__':
unittest.main(defaultTest='README')
<commit_msg>Add a test_suite method as used by the Zope3 test runner<commit_after>#!/usr/bin/env python
# -*- coding: ascii -*-
import unittest, os, os.path, sys
from doctest import DocFileSuite
sys.path.insert(0, os.path.join(os.pardir, os.pardir))
locs = [
'README.txt',
os.path.join(os.pardir, 'README.txt'),
os.path.join(os.pardir, os.pardir, 'README.txt'),
]
README = None
for loc in locs:
if os.path.exists(loc):
README = DocFileSuite(loc)
break
if README is None:
raise RuntimeError("Can't locate README.txt")
def test_suite():
return README
if __name__ == '__main__':
unittest.main(defaultTest='README')
|
c3c0bf614e046f4640d123f801739fc7ea0d7cac | salt/states/salt_proxy.py | salt/states/salt_proxy.py | # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
| # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
Example:
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
| Add example to function docstring | Add example to function docstring
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
Add example to function docstring | # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
Example:
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
<commit_msg>Add example to function docstring<commit_after> | # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
Example:
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
| # -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
Add example to function docstring# -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
Example:
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
| <commit_before># -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
<commit_msg>Add example to function docstring<commit_after># -*- coding: utf-8 -*-
'''
Salt proxy state
.. versionadded:: 2015.8.2
State to deploy and run salt-proxy processes
on a minion.
Set up pillar data for your proxies per the documentation.
Run the state as below
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
This state will configure the salt proxy settings
within /etc/salt/proxy (if /etc/salt/proxy doesn't exists)
and start the salt-proxy process (default true),
if it isn't already running.
'''
from __future__ import absolute_import
import logging
log = logging.getLogger(__name__)
def configure_proxy(name, proxyname='p8000', start=True):
'''
Create the salt proxy file and start the proxy process
if required
Parameters:
name:
The name of this state
proxyname:
Name to be used for this proxy (should match entries in pillar)
start:
Boolean indicating if the process should be started
Example:
..code-block:: yaml
salt-proxy-configure:
salt_proxy.configure_proxy:
- proxyname: p8000
- start: True
'''
ret = __salt__['salt_proxy.configure_proxy'](proxyname,
start=start)
ret.update({
'name': name,
'comment': '{0} config messages'.format(name)
})
return ret
|
dd63ef92e14a3111fd0914e9994aaea9ebd4e668 | hcalendar/hcalendar.py | hcalendar/hcalendar.py | import bs4
from vcalendar import vCalendar
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
self._soup = bs4.BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
| from vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
| Allow BeautifulSoup object being passed into hCalendar | Allow BeautifulSoup object being passed into hCalendar
| Python | mit | mback2k/python-hcalendar | import bs4
from vcalendar import vCalendar
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
self._soup = bs4.BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
Allow BeautifulSoup object being passed into hCalendar | from vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
| <commit_before>import bs4
from vcalendar import vCalendar
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
self._soup = bs4.BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
<commit_msg>Allow BeautifulSoup object being passed into hCalendar<commit_after> | from vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
| import bs4
from vcalendar import vCalendar
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
self._soup = bs4.BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
Allow BeautifulSoup object being passed into hCalendarfrom vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
| <commit_before>import bs4
from vcalendar import vCalendar
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
self._soup = bs4.BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
<commit_msg>Allow BeautifulSoup object being passed into hCalendar<commit_after>from vcalendar import vCalendar
from bs4 import BeautifulSoup
class hCalendar(object):
def __init__(self, markup, value=None, key='id'):
if isinstance(markup, BeautifulSoup):
self._soup = markup
else:
self._soup = BeautifulSoup(markup)
if value:
self._soup = self._soup.find(**{key: value})
self._cals = self._soup.findAll(attrs='vcalendar')
if self._cals:
self._cals = map(vCalendar, self._cals)
else:
self._cals = [vCalendar(self._soup)]
def __len__(self):
return len(self._cals)
def __iter__(self):
return iter(self._cals)
def __getitem__(self, key):
return self._cals[key]
def getCalendar(self):
return self._cals
|
a773b1aa98a96b6335b76fc587ea714e5cca7545 | cle/relocations/mips.py | cle/relocations/mips.py | from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
| from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
R_MIPS_JUMP_SLOT = generic.GenericAbsoluteReloc
R_MIPS_GLOB_DAT = generic.GenericAbsoluteReloc
| Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocations | Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocations
| Python | bsd-2-clause | chubbymaggie/cle,angr/cle | from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocations | from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
R_MIPS_JUMP_SLOT = generic.GenericAbsoluteReloc
R_MIPS_GLOB_DAT = generic.GenericAbsoluteReloc
| <commit_before>from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
<commit_msg>Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocations<commit_after> | from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
R_MIPS_JUMP_SLOT = generic.GenericAbsoluteReloc
R_MIPS_GLOB_DAT = generic.GenericAbsoluteReloc
| from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocationsfrom . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
R_MIPS_JUMP_SLOT = generic.GenericAbsoluteReloc
R_MIPS_GLOB_DAT = generic.GenericAbsoluteReloc
| <commit_before>from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
<commit_msg>Add R_MIPS_JUMP_SLOT and R_MIPS_GLOB_DAT relocations<commit_after>from . import generic
arch = 'MIPS32'
R_MIPS_32 = generic.GenericAbsoluteAddendReloc
R_MIPS_REL32 = generic.GenericRelativeReloc
R_MIPS_TLS_DTPMOD32 = generic.GenericTLSModIdReloc
R_MIPS_TLS_TPREL32 = generic.GenericTLSOffsetReloc
R_MIPS_TLS_DTPREL32 = generic.GenericTLSDoffsetReloc
R_MIPS_JUMP_SLOT = generic.GenericAbsoluteReloc
R_MIPS_GLOB_DAT = generic.GenericAbsoluteReloc
|
5863cbf81156074df4e0a9abb7a823a7701933da | tlsenum/__init__.py | tlsenum/__init__.py | import click
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
pass
| import socket
import click
from construct import UBInt16
from tlsenum.parse_hello import (
ClientHello, Extensions, HandshakeFailure, ServerHello
)
from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def send_client_hello(host, port, data):
"""
Sends a ClientHello message in bytes.
Returns a ServerHello message in bytes
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(data)
server_hello = s.recv(5)
server_hello += s.recv(UBInt16("length").parse(server_hello[3:5]))
return server_hello
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
cipher_suites_list = [i.name for i in CipherSuites]
extension = Extensions()
extension.sni = host
extension.ec_curves = [i.name for i in ECCurves]
extension.ec_point_format = [i.name for i in ECPointFormat]
client_hello = ClientHello()
client_hello.protocol_version = "1.2"
client_hello.deflate = False
client_hello.extensions = extension.build()
supported_cipher_suites = []
while True:
client_hello.cipher_suites = cipher_suites_list
server_hello = send_client_hello(host, port, client_hello.build())
try:
server_hello = ServerHello.parse_server_hello(server_hello)
except HandshakeFailure:
break
supported_cipher_suites.append(server_hello.cipher_suite)
cipher_suites_list.remove(server_hello.cipher_suite)
for i in supported_cipher_suites:
print(i)
| Add very basic logic to figure out supported cipher suites. | Add very basic logic to figure out supported cipher suites.
| Python | mit | Ayrx/tlsenum,Ayrx/tlsenum | import click
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
pass
Add very basic logic to figure out supported cipher suites. | import socket
import click
from construct import UBInt16
from tlsenum.parse_hello import (
ClientHello, Extensions, HandshakeFailure, ServerHello
)
from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def send_client_hello(host, port, data):
"""
Sends a ClientHello message in bytes.
Returns a ServerHello message in bytes
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(data)
server_hello = s.recv(5)
server_hello += s.recv(UBInt16("length").parse(server_hello[3:5]))
return server_hello
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
cipher_suites_list = [i.name for i in CipherSuites]
extension = Extensions()
extension.sni = host
extension.ec_curves = [i.name for i in ECCurves]
extension.ec_point_format = [i.name for i in ECPointFormat]
client_hello = ClientHello()
client_hello.protocol_version = "1.2"
client_hello.deflate = False
client_hello.extensions = extension.build()
supported_cipher_suites = []
while True:
client_hello.cipher_suites = cipher_suites_list
server_hello = send_client_hello(host, port, client_hello.build())
try:
server_hello = ServerHello.parse_server_hello(server_hello)
except HandshakeFailure:
break
supported_cipher_suites.append(server_hello.cipher_suite)
cipher_suites_list.remove(server_hello.cipher_suite)
for i in supported_cipher_suites:
print(i)
| <commit_before>import click
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
pass
<commit_msg>Add very basic logic to figure out supported cipher suites.<commit_after> | import socket
import click
from construct import UBInt16
from tlsenum.parse_hello import (
ClientHello, Extensions, HandshakeFailure, ServerHello
)
from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def send_client_hello(host, port, data):
"""
Sends a ClientHello message in bytes.
Returns a ServerHello message in bytes
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(data)
server_hello = s.recv(5)
server_hello += s.recv(UBInt16("length").parse(server_hello[3:5]))
return server_hello
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
cipher_suites_list = [i.name for i in CipherSuites]
extension = Extensions()
extension.sni = host
extension.ec_curves = [i.name for i in ECCurves]
extension.ec_point_format = [i.name for i in ECPointFormat]
client_hello = ClientHello()
client_hello.protocol_version = "1.2"
client_hello.deflate = False
client_hello.extensions = extension.build()
supported_cipher_suites = []
while True:
client_hello.cipher_suites = cipher_suites_list
server_hello = send_client_hello(host, port, client_hello.build())
try:
server_hello = ServerHello.parse_server_hello(server_hello)
except HandshakeFailure:
break
supported_cipher_suites.append(server_hello.cipher_suite)
cipher_suites_list.remove(server_hello.cipher_suite)
for i in supported_cipher_suites:
print(i)
| import click
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
pass
Add very basic logic to figure out supported cipher suites.import socket
import click
from construct import UBInt16
from tlsenum.parse_hello import (
ClientHello, Extensions, HandshakeFailure, ServerHello
)
from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def send_client_hello(host, port, data):
"""
Sends a ClientHello message in bytes.
Returns a ServerHello message in bytes
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(data)
server_hello = s.recv(5)
server_hello += s.recv(UBInt16("length").parse(server_hello[3:5]))
return server_hello
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
cipher_suites_list = [i.name for i in CipherSuites]
extension = Extensions()
extension.sni = host
extension.ec_curves = [i.name for i in ECCurves]
extension.ec_point_format = [i.name for i in ECPointFormat]
client_hello = ClientHello()
client_hello.protocol_version = "1.2"
client_hello.deflate = False
client_hello.extensions = extension.build()
supported_cipher_suites = []
while True:
client_hello.cipher_suites = cipher_suites_list
server_hello = send_client_hello(host, port, client_hello.build())
try:
server_hello = ServerHello.parse_server_hello(server_hello)
except HandshakeFailure:
break
supported_cipher_suites.append(server_hello.cipher_suite)
cipher_suites_list.remove(server_hello.cipher_suite)
for i in supported_cipher_suites:
print(i)
| <commit_before>import click
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
"""
A command line tool to enumerate TLS cipher-suites supported by a server.
"""
pass
<commit_msg>Add very basic logic to figure out supported cipher suites.<commit_after>import socket
import click
from construct import UBInt16
from tlsenum.parse_hello import (
ClientHello, Extensions, HandshakeFailure, ServerHello
)
from tlsenum.mappings import CipherSuites, ECCurves, ECPointFormat
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
def send_client_hello(host, port, data):
"""
Sends a ClientHello message in bytes.
Returns a ServerHello message in bytes
"""
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, port))
s.send(data)
server_hello = s.recv(5)
server_hello += s.recv(UBInt16("length").parse(server_hello[3:5]))
return server_hello
@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument("host", type=click.STRING)
@click.argument("port", type=click.INT)
@click.option("--verify-cert", is_flag=True)
def cli(host, port, verify_cert):
    """
    A command line tool to enumerate TLS cipher-suites supported by a server.

    Repeatedly offers the server every cipher suite not yet accepted; each
    ServerHello reveals one supported suite, which is removed from the offer
    list.  The loop ends when the server answers with a HandshakeFailure
    alert, i.e. none of the remaining suites are acceptable.
    """
    # NOTE(review): verify_cert is accepted but never used in this body —
    # confirm whether certificate verification was meant to be wired in.
    # Start by offering every cipher suite we know about.
    cipher_suites_list = [i.name for i in CipherSuites]
    extension = Extensions()
    extension.sni = host
    extension.ec_curves = [i.name for i in ECCurves]
    extension.ec_point_format = [i.name for i in ECPointFormat]
    client_hello = ClientHello()
    # Fixed at TLS 1.2 with compression (DEFLATE) disabled.
    client_hello.protocol_version = "1.2"
    client_hello.deflate = False
    client_hello.extensions = extension.build()
    supported_cipher_suites = []
    while True:
        # Re-offer the shrinking list; the server picks one suite per round.
        client_hello.cipher_suites = cipher_suites_list
        server_hello = send_client_hello(host, port, client_hello.build())
        try:
            server_hello = ServerHello.parse_server_hello(server_hello)
        except HandshakeFailure:
            # Server rejects everything still on offer — enumeration done.
            break
        supported_cipher_suites.append(server_hello.cipher_suite)
        cipher_suites_list.remove(server_hello.cipher_suite)
    for i in supported_cipher_suites:
        print(i)
|
f6d54387a620020e3766ca9f2528b5b4af41e9c1 | tests/unit/tornado_tests.py | tests/unit/tornado_tests.py | """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance())
| """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance(), False)
| Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch | Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch
This reverts commit 3ca1365eafa63712e6d0dca7f5d6aa134f05d580.
| Python | bsd-3-clause | pika/pika,vitaly-krugl/pika | """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance())
Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch
This reverts commit 3ca1365eafa63712e6d0dca7f5d6aa134f05d580. | """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance(), False)
| <commit_before>"""
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance())
<commit_msg>Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch
This reverts commit 3ca1365eafa63712e6d0dca7f5d6aa134f05d580.<commit_after> | """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance(), False)
| """
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance())
Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch
This reverts commit 3ca1365eafa63712e6d0dca7f5d6aa134f05d580."""
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance(), False)
| <commit_before>"""
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance())
<commit_msg>Revert "Fixed check in test_tornado_connection_call_parent." accidentally committed to wrong branch
This reverts commit 3ca1365eafa63712e6d0dca7f5d6aa134f05d580.<commit_after>"""
Tests for pika.adapters.tornado_connection
"""
import unittest
import mock
from pika.adapters import tornado_connection
class TornadoConnectionTests(unittest.TestCase):
@mock.patch('pika.adapters.base_connection.BaseConnection.__init__')
def test_tornado_connection_call_parent(self, mock_init):
obj = tornado_connection.TornadoConnection()
mock_init.assert_called_once_with(
None, None, None, None,
tornado_connection.ioloop.IOLoop.instance(), False)
|
157d427646ccee414503089e7080b92335848803 | floq/helpers.py | floq/helpers.py | def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
if num > cutoff:
return (num % n)-n+cutoff
if num < -cutoff:
return -(-num % n)+n+cutoff
else:
return num+cutoff
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
| def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
| Implement rollover correctly and a lot more concisely | Implement rollover correctly and a lot more concisely
| Python | mit | sirmarcel/floq | def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
if num > cutoff:
return (num % n)-n+cutoff
if num < -cutoff:
return -(-num % n)+n+cutoff
else:
return num+cutoff
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
Implement rollover correctly and a lot more concisely | def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
| <commit_before>def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
if num > cutoff:
return (num % n)-n+cutoff
if num < -cutoff:
return -(-num % n)+n+cutoff
else:
return num+cutoff
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
<commit_msg>Implement rollover correctly and a lot more concisely<commit_after> | def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
| def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
if num > cutoff:
return (num % n)-n+cutoff
if num < -cutoff:
return -(-num % n)+n+cutoff
else:
return num+cutoff
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
Implement rollover correctly and a lot more conciselydef n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
| <commit_before>def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
if num > cutoff:
return (num % n)-n+cutoff
if num < -cutoff:
return -(-num % n)+n+cutoff
else:
return num+cutoff
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
<commit_msg>Implement rollover correctly and a lot more concisely<commit_after>def n_to_i(num, n):
"""
Translate num, ranging from
-(n-1)/2 through (n-1)/2
into an index i from 0 to n-1
If num > (n-1)/2, map it into the interval
This is necessary to translate from a physical
Fourier mode number to an index in an array.
"""
cutoff = (n-1)/2
return (num+cutoff) % n
def i_to_n(i, n):
"""
Translate index i, ranging from 0 to n-1
into a number from -(n-1)/2 through (n-1)/2
This is necessary to translate from an index to a physical
Fourier mode number.
"""
cutoff = (n-1)/2
return i-cutoff
|
610ada5f26d7421c2b0dd16a8a14ac9c95e4ed8c | config/test/__init__.py | config/test/__init__.py | from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
| from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
cmd = shlex.split(env.get('TEST_COMMAND'))
print('Executing:', cmd)
sys.exit(subprocess.call(cmd))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
if not python: python = 'python'
if env['PLATFORM'] == 'win32': python = python.replace('\\', '\\\\')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
| Fix "test" target in Windows | Fix "test" target in Windows
| Python | lgpl-2.1 | CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang | from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
Fix "test" target in Windows | from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
cmd = shlex.split(env.get('TEST_COMMAND'))
print('Executing:', cmd)
sys.exit(subprocess.call(cmd))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
if not python: python = 'python'
if env['PLATFORM'] == 'win32': python = python.replace('\\', '\\\\')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
| <commit_before>from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
<commit_msg>Fix "test" target in Windows<commit_after> | from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
cmd = shlex.split(env.get('TEST_COMMAND'))
print('Executing:', cmd)
sys.exit(subprocess.call(cmd))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
if not python: python = 'python'
if env['PLATFORM'] == 'win32': python = python.replace('\\', '\\\\')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
| from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
Fix "test" target in Windowsfrom SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
cmd = shlex.split(env.get('TEST_COMMAND'))
print('Executing:', cmd)
sys.exit(subprocess.call(cmd))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
if not python: python = 'python'
if env['PLATFORM'] == 'win32': python = python.replace('\\', '\\\\')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
| <commit_before>from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
import distutils.spawn
python = distutils.spawn.find_executable('python3')
if not python: python = distutils.spawn.find_executable('python')
if not python: python = distutils.spawn.find_executable('python2')
cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists(): return 1
<commit_msg>Fix "test" target in Windows<commit_after>from SCons.Script import *
def run_tests(env):
    """Run the configured TEST_COMMAND and terminate with its exit code."""
    import shlex
    import subprocess
    import sys

    command_line = env.get('TEST_COMMAND')
    argv = shlex.split(command_line)
    print('Executing:', argv)
    status = subprocess.call(argv)
    sys.exit(status)
def generate(env):
    """Register the TEST_COMMAND variable and hook up the `test` target.

    Called by SCons when this tool is loaded: builds a default test-harness
    command line around the best available Python interpreter and, when
    `test` was requested on the command line, schedules run_tests() to
    execute after configuration.
    """
    import os
    import distutils.spawn

    # Prefer python3, fall back through python/python2, finally assume a
    # bare "python" is on PATH.  NOTE(review): distutils is deprecated and
    # removed in Python 3.12 — shutil.which is the modern replacement.
    python = distutils.spawn.find_executable('python3')
    if not python: python = distutils.spawn.find_executable('python')
    if not python: python = distutils.spawn.find_executable('python2')
    if not python: python = 'python'
    # Double the backslashes so the Windows path survives the later
    # shlex.split() in run_tests().
    if env['PLATFORM'] == 'win32': python = python.replace('\\', '\\\\')
    cmd = python + ' tests/testHarness -C tests --diff-failed --view-failed ' \
        '--view-unfiltered --save-failed --build'
    # Dockbot CI logs do not render ANSI color codes.
    if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
    env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
    # COMMAND_LINE_TARGETS is provided by the SCons.Script star import.
    if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigureCB(run_tests)
def exists():
    """Report that this SCons tool is always available."""
    return 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.