commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
977b2ac4baa8f8beaad860ae115c1f884ee5102b | src/plugins/python/python/python_plugin2.py | src/plugins/python/python/python_plugin2.py | class ElektraPlugin(object):
def __init__(self):
self.x = 1
pass
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
| class ElektraPlugin(object):
def __init__(self):
self.x = 1
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
| Remove unnecessary statement from example | Python: Remove unnecessary statement from example
| Python | bsd-3-clause | petermax2/libelektra,ElektraInitiative/libelektra,petermax2/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,petermax2/libelektra,petermax2/libelektra,BernhardDenner/libelektra,BernhardDenner/libelektra,petermax2/libelektra,ElektraInitiative/libelektra,ElektraInitiative/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,petermax2/libelektra,petermax2/libelektra,mpranj/libelektra,BernhardDenner/libelektra,BernhardDenner/libelektra,BernhardDenner/libelektra,petermax2/libelektra,mpranj/libelektra,ElektraInitiative/libelektra,petermax2/libelektra,mpranj/libelektra,mpranj/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,mpranj/libelektra,mpranj/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra,mpranj/libelektra,BernhardDenner/libelektra,ElektraInitiative/libelektra | class ElektraPlugin(object):
def __init__(self):
self.x = 1
pass
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
Python: Remove unnecessary statement from example | class ElektraPlugin(object):
def __init__(self):
self.x = 1
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
| <commit_before>class ElektraPlugin(object):
def __init__(self):
self.x = 1
pass
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
<commit_msg>Python: Remove unnecessary statement from example<commit_after> | class ElektraPlugin(object):
def __init__(self):
self.x = 1
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
| class ElektraPlugin(object):
def __init__(self):
self.x = 1
pass
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
Python: Remove unnecessary statement from exampleclass ElektraPlugin(object):
def __init__(self):
self.x = 1
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
| <commit_before>class ElektraPlugin(object):
def __init__(self):
self.x = 1
pass
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
<commit_msg>Python: Remove unnecessary statement from example<commit_after>class ElektraPlugin(object):
def __init__(self):
self.x = 1
def open(self, config, errorKey):
print("[CLASS-PYTHON-2] open -->")
self.x = self.x + 1
return 1
def get(self, returned, parentKey):
print("[CLASS-PYTHON-2] get")
return 1
def set(self, returned, parentKey):
print("[CLASS-PYTHON-2] set")
return 1
def error(self, returned, parentKey):
print("[CLASS-PYTHON-2] error")
return 1
def close(self, errorKey):
print("[CLASS-PYTHON-2] <-- close")
return 0
|
c38e64dc57ec2fd07d64c638dad81ce9b6079425 | setup/fusion/scripts/Comp/avalon/publish.py | setup/fusion/scripts/Comp/avalon/publish.py | import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion to Deadline)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
| import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
| Remove reference to Deadline in window title | Remove reference to Deadline in window title
| Python | mit | getavalon/core,MoonShineVFX/core,mindbender-studio/core,mindbender-studio/core,getavalon/core,MoonShineVFX/core | import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion to Deadline)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
Remove reference to Deadline in window title | import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
| <commit_before>import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion to Deadline)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
<commit_msg>Remove reference to Deadline in window title<commit_after> | import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
| import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion to Deadline)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
Remove reference to Deadline in window titleimport os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
| <commit_before>import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion to Deadline)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
<commit_msg>Remove reference to Deadline in window title<commit_after>import os
import sys
import avalon.api
import avalon.fusion
import pyblish_qml
def _install_fusion():
from pyblish_qml import settings
import pyblish_qml.host as host
sys.stdout.write("Setting up Pyblish QML in Fusion\n")
if settings.ContextLabel == settings.ContextLabelDefault:
settings.ContextLabel = "Fusion"
if settings.WindowTitle == settings.WindowTitleDefault:
settings.WindowTitle = "Pyblish (Fusion)"
def _set_current_working_dir():
# Set current working directory next to comp
filename = comp.MapPath(comp.GetAttrs()["COMPS_FileName"])
if filename and os.path.exists(filename):
cwd = os.path.dirname(filename)
else:
# Fallback to Avalon projects root
# for unsaved files.
cwd = os.environ["AVALON_PROJECTS"]
os.chdir(cwd)
print("Starting Pyblish setup..")
# Install avalon
avalon.api.install(avalon.fusion)
# force current working directory to NON FUSION path
# os.getcwd will return the binary folder of Fusion in this case
_set_current_working_dir()
# install fusion title
_install_fusion()
# Run QML in modal mode so it keeps listening to the
# server in the main thread and keeps this process
# open until QML finishes.
print("Running publish_qml.show(modal=True)..")
pyblish_qml.show(modal=True)
|
51c0ae7b647a9ea354928f80acbcabef778bedd5 | icekit/page_types/articles/models.py | icekit/page_types/articles/models.py | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
| from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order. | Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object. | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| <commit_before>from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
<commit_msg>Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object.<commit_after> | from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object.from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
| <commit_before>from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('parent', 'slug', 'publishing_linked'),)
<commit_msg>Update `unique_together`. Order matters. Fields are scanned by PostgreSQL in order.
The first field should be the one most likely to uniquely identify an
object.<commit_after>from django.db import models
from icekit.content_collections.abstract_models import \
AbstractCollectedContent, AbstractListingPage, TitleSlugMixin
from icekit.publishing.models import PublishableFluentContents
class ArticleCategoryPage(AbstractListingPage):
def get_public_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.published().filter(parent_id=unpublished_pk)
def get_visible_items(self):
unpublished_pk = self.get_draft().pk
return Article.objects.visible().filter(parent_id=unpublished_pk)
class Article(
PublishableFluentContents, AbstractCollectedContent, TitleSlugMixin):
parent = models.ForeignKey(
'ArticleCategoryPage',
limit_choices_to={'publishing_is_draft': True},
on_delete=models.PROTECT,
)
class Meta:
unique_together = (('slug', 'parent', 'publishing_linked'), )
|
62233f81247b24a2face81231983ec841ed681ab | src/arduino/arduino_io.py | src/arduino/arduino_io.py | from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogyWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value) | from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)
| Fix analogWrite API naming issue | Fix analogWrite API naming issue
| Python | mit | bucketzxm/pyquark,rli9/pygalileo,rli9/pyquark | from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogyWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)Fix analogWrite API naming issue | from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)
| <commit_before>from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogyWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)<commit_msg>Fix analogWrite API naming issue<commit_after> | from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)
| from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogyWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)Fix analogWrite API naming issuefrom pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)
| <commit_before>from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogyWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)<commit_msg>Fix analogWrite API naming issue<commit_after>from pygalileo.io.dio import Dio
from pygalileo.io.pwmio import Pwmio
'''
Defining Digital Pins modes: INPUT, INPUT_PULLUP, and OUTPUT
'''
INPUT = 'in'
OUTPUT = 'out'
'''
Defining Pin Levels: HIGH and LOW
'''
HIGH = 1
LOW = 0
_DIOS = {}
def pinMode(pin, mode):
if pin not in _DIOS:
dio = Dio(pin)
dio.direction(mode)
_DIOS[pin] = dio
else:
dio = _DIOS[pin]
dio.direction(mode)
def digitalWrite(pin, value):
dio = _DIOS[pin]
dio.value(value)
def digitalRead(pin):
dio = _DIOS[pin]
return dio.value()
def analogWrite(pin, value):
dio = _DIOS[pin]
return dio.pwm(value)
|
e1aa1379eb9ac6c550537c95f6b949a2d456d7c4 | demo/demo/models.py | demo/demo/models.py | from django.db import models
from django.contrib.auth.models import User
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
god = models.ForeignKey(User, related_name='worlds')
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
| from django.db import models
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
| Remove relation to user model in order to keep things simpler | Remove relation to user model in order to keep things simpler
| Python | mit | novafloss/populous | from django.db import models
from django.contrib.auth.models import User
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
god = models.ForeignKey(User, related_name='worlds')
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
Remove relation to user model in order to keep things simpler | from django.db import models
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
| <commit_before>from django.db import models
from django.contrib.auth.models import User
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
god = models.ForeignKey(User, related_name='worlds')
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
<commit_msg>Remove relation to user model in order to keep things simpler<commit_after> | from django.db import models
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
| from django.db import models
from django.contrib.auth.models import User
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
god = models.ForeignKey(User, related_name='worlds')
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
Remove relation to user model in order to keep things simplerfrom django.db import models
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
| <commit_before>from django.db import models
from django.contrib.auth.models import User
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
god = models.ForeignKey(User, related_name='worlds')
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
<commit_msg>Remove relation to user model in order to keep things simpler<commit_after>from django.db import models
ABODE_TYPES = (
('SH', 'Small house'),
('H', 'House'),
('SB', 'Small building'),
('B', 'Building')
)
GENDERS = (
('M', 'Male'),
('F', 'Female')
)
class World(models.Model):
name = models.CharField(max_length=30)
description = models.TextField()
class City(models.Model):
world = models.ForeignKey(World, related_name='cities')
name = models.CharField(max_length=40)
class Abode(models.Model):
city = models.ForeignKey(City, related_name='abodes')
owner = models.OneToOneField('Citizen', related_name='owned_house')
type = models.CharField(max_length=2, choices=ABODE_TYPES)
class Citizen(models.Model):
abode = models.ForeignKey(Abode, related_name='citizens')
first_name = models.CharField(max_length=20)
middle_name = models.CharField(max_length=20, blank=True, default='')
last_name = models.CharField(max_length=20)
birth = models.DateField()
gender = models.CharField(max_length=1, choices=GENDERS)
class Hobby(models.Model):
name = models.CharField(max_length=40)
citizens = models.ManyToManyField(Citizen)
|
cd9836a3147e13ac511b9c14a3d75e0c7a886eba | viper/interpreter/value.py | viper/interpreter/value.py | from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(self.vals)})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
| from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(map(str, self.vals))})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
| Fix __repl__ implementation for TupleVal | Fix __repl__ implementation for TupleVal
| Python | apache-2.0 | pdarragh/Viper | from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(self.vals)})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
Fix __repl__ implementation for TupleVal | from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(map(str, self.vals))})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
| <commit_before>from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(self.vals)})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
<commit_msg>Fix __repl__ implementation for TupleVal<commit_after> | from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(map(str, self.vals))})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
| from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(self.vals)})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
Fix __repl__ implementation for TupleValfrom .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(map(str, self.vals))})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
| <commit_before>from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(self.vals)})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
<commit_msg>Fix __repl__ implementation for TupleVal<commit_after>from .environment import Environment
from viper.parser.ast.nodes import Expr
class Value:
pass
class TupleVal(Value):
def __init__(self, *vals: Value):
self.vals = list(vals)
def __repr__(self) -> str:
return f"TupleVal({', '.join(map(str, self.vals))})"
class NumVal(Value):
def __init__(self, val: str):
self.val = val
def __repr__(self) -> str:
return f"NumVal({self.val})"
class CloVal(Value):
def __init__(self, name: str, expr: Expr, env: Environment):
self.name = name
self.expr = expr
self.env = env
def __repr__(self) -> str:
return f"CloVal({self.name}, {self.expr}, {self.env})"
class BoolVal(Value):
def __repr__(self) -> str:
return "BoolVal"
class TrueVal(BoolVal):
def __repr__(self) -> str:
return "TrueVal"
class FalseVal(BoolVal):
def __repr__(self) -> str:
return "FalseVal"
class UnitVal(Value):
def __repr__(self) -> str:
return "UnitVal"
class BottomVal(Value):
def __repr__(self) -> str:
return "BottomVal"
class EllipsisVal(Value):
def __repr__(self) -> str:
return "EllipsisVal"
|
00b68bca8f8f1c857aadf5af1cba717fcfa1272e | jsl/registry.py | jsl/registry.py | # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{}.{}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{}.{}'.format(module, name)
_documents_registry[name] = document_cls
| # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{0}.{1}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{0}.{1}'.format(module, name)
_documents_registry[name] = document_cls
| Fix incompatibility with Python 2.6 | Fix incompatibility with Python 2.6
| Python | bsd-3-clause | aromanovich/jsl,aromanovich/jsl | # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{}.{}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{}.{}'.format(module, name)
_documents_registry[name] = document_cls
Fix incompatibility with Python 2.6 | # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{0}.{1}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{0}.{1}'.format(module, name)
_documents_registry[name] = document_cls
| <commit_before># coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{}.{}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{}.{}'.format(module, name)
_documents_registry[name] = document_cls
<commit_msg>Fix incompatibility with Python 2.6<commit_after> | # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{0}.{1}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{0}.{1}'.format(module, name)
_documents_registry[name] = document_cls
| # coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{}.{}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{}.{}'.format(module, name)
_documents_registry[name] = document_cls
Fix incompatibility with Python 2.6# coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{0}.{1}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{0}.{1}'.format(module, name)
_documents_registry[name] = document_cls
| <commit_before># coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{}.{}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{}.{}'.format(module, name)
_documents_registry[name] = document_cls
<commit_msg>Fix incompatibility with Python 2.6<commit_after># coding: utf-8
from __future__ import unicode_literals
"""
A registry of all documents.
"""
_documents_registry = {}
def get_document(name, module=None):
if module:
name = '{0}.{1}'.format(module, name)
return _documents_registry[name]
def put_document(name, document_cls, module=None):
if module:
name = '{0}.{1}'.format(module, name)
_documents_registry[name] = document_cls
|
443874df07a3c3ed8d9e075b25e5f93c1de0128b | tests/devices_test/device_packages_test.py | tests/devices_test/device_packages_test.py | # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertListEqual(packages, list(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
| # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertEqual(len(packages), len(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
| Use len of set to check for duplicates in list of packages. | Use len of set to check for duplicates in list of packages.
Resolves: #154.
Checking for equality of the two lists was a mistake, since
the order of the list generated from the set is undefined.
| Python | lgpl-2.1 | rhinstaller/blivet,jkonecny12/blivet,AdamWill/blivet,vojtechtrefny/blivet,jkonecny12/blivet,rvykydal/blivet,vpodzime/blivet,rhinstaller/blivet,AdamWill/blivet,rvykydal/blivet,vojtechtrefny/blivet,vpodzime/blivet | # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertListEqual(packages, list(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
Use len of set to check for duplicates in list of packages.
Resolves: #154.
Checking for equality of the two lists was a mistake, since
the order of the list generated from the set is undefined. | # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertEqual(len(packages), len(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
| <commit_before># vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
    """Test that a stacked device reports a sane list of required packages."""
    def testPackages(self):
        # Build a small stack: two md-member disks -> RAID1 array -> LUKS layer.
        dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
        dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
        dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
        luks = LUKSDevice("luks", parents=[dev])
        packages = luks.packages
        # no duplicates in list of packages
        # Compare lengths rather than assertListEqual(packages, list(set(packages))):
        # the order of a list built from a set is undefined, so an element-wise
        # comparison can fail intermittently even when there are no duplicates.
        self.assertEqual(len(packages), len(set(packages)))
        # several packages that ought to be included are
        for package in dev1.packages + dev2.packages + dev.packages:
            self.assertIn(package, packages)
        for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
            self.assertIn(package, packages)
<commit_msg>Use len of set to check for duplicates in list of packages.
Resolves: #154.
Checking for equality of the two lists was a mistake, since
the order of the list generated from the set is undefined.<commit_after> | # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertEqual(len(packages), len(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
| # vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertListEqual(packages, list(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
Use len of set to check for duplicates in list of packages.
Resolves: #154.
Checking for equality of the two lists was a mistake, since
the order of the list generated from the set is undefined.# vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertEqual(len(packages), len(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
| <commit_before># vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertListEqual(packages, list(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
<commit_msg>Use len of set to check for duplicates in list of packages.
Resolves: #154.
Checking for equality of the two lists was a mistake, since
the order of the list generated from the set is undefined.<commit_after># vim:set fileencoding=utf-8
import unittest
from blivet.devices import DiskDevice
from blivet.devices import LUKSDevice
from blivet.devices import MDRaidArrayDevice
from blivet.formats import getFormat
class DevicePackagesTestCase(unittest.TestCase):
"""Test device name validation"""
def testPackages(self):
dev1 = DiskDevice("name", fmt=getFormat("mdmember"))
dev2 = DiskDevice("other", fmt=getFormat("mdmember"))
dev = MDRaidArrayDevice("dev", level="raid1", parents=[dev1,dev2])
luks = LUKSDevice("luks", parents=[dev])
packages = luks.packages
# no duplicates in list of packages
self.assertEqual(len(packages), len(set(packages)))
# several packages that ought to be included are
for package in dev1.packages + dev2.packages + dev.packages:
self.assertIn(package, packages)
for package in dev1.format.packages + dev2.format.packages + dev.format.packages:
self.assertIn(package, packages)
|
13cad8b6fb7c484a492333e86a6e774ce4742a40 | src/webassets/filter/uglifyjs.py | src/webassets/filter/uglifyjs.py | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| Make UglifyJSFilter docstring more consistent with other filters | Make UglifyJSFilter docstring more consistent with other filters
| Python | bsd-2-clause | john2x/webassets,scorphus/webassets,aconrad/webassets,0x1997/webassets,heynemann/webassets,0x1997/webassets,glorpen/webassets,JDeuce/webassets,aconrad/webassets,john2x/webassets,JDeuce/webassets,glorpen/webassets,florianjacob/webassets,heynemann/webassets,aconrad/webassets,wijerasa/webassets,florianjacob/webassets,glorpen/webassets,wijerasa/webassets,heynemann/webassets,scorphus/webassets | """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
Make UglifyJSFilter docstring more consistent with other filters | import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
    """
    Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.

    UglifyJS is an external NodeJS tool; the ``uglifyjs`` executable is
    expected to be on the path unless a ``UGLIFYJS_BIN`` setting points at it.

    Extra command-line options can be supplied through the
    ``UGLIFYJS_EXTRA_ARGS`` setting, a list of strings.
    """

    name = 'uglifyjs'

    def setup(self):
        # Fall back to the bare executable name when UGLIFYJS_BIN is unset.
        configured = self.get_config('UGLIFYJS_BIN', require=False)
        self.binary = configured or 'uglifyjs'
        self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
                                          require=False)

    def output(self, _in, out, **kw):
        # Command line: binary first, then any user-supplied extra flags.
        argv = [self.binary]
        argv.extend(self.extra_args or [])
        proc = subprocess.Popen(argv,
                                stdin=subprocess.PIPE,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        stdout, stderr = proc.communicate(_in.read())
        if proc.returncode != 0:
            raise FilterError('uglifyjs: subprocess had error: stderr=%s, '
                              'stdout=%s, returncode=%s' % (
                                  stderr, stdout, proc.returncode))
        out.write(stdout)
| <commit_before>"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
<commit_msg>Make UglifyJSFilter docstring more consistent with other filters<commit_after> | import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| """Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
Make UglifyJSFilter docstring more consistent with other filtersimport subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
| <commit_before>"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
<commit_msg>Make UglifyJSFilter docstring more consistent with other filters<commit_after>import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifyJSFilter',)
class UglifyJSFilter(Filter):
"""
Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
Additional options may be passed to ``uglifyjs`` using the setting
``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
9e39f0ccba89e12eb5ee6269555dae9c854d44f2 | dbaas/dbaas_services/analyzing/integration/analyze.py | dbaas/dbaas_services/analyzing/integration/analyze.py | # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, data=kwargs)
return response.json()
| # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, json=kwargs)
return response.json()
| Change data param to json | Change data param to json
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, data=kwargs)
return response.json()
Change data param to json | # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, json=kwargs)
return response.json()
| <commit_before># -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, data=kwargs)
return response.json()
<commit_msg>Change data param to json<commit_after> | # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, json=kwargs)
return response.json()
| # -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, data=kwargs)
return response.json()
Change data param to json# -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, json=kwargs)
return response.json()
| <commit_before># -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, data=kwargs)
return response.json()
<commit_msg>Change data param to json<commit_after># -*- coding: utf-8 -*-
import requests
from dbaas_services.analyzing.exceptions import ServiceNotAvailable
class AnalyzeService(object):
def __init__(self, endpoint, healh_check_route, healh_check_string):
self.endpoint = endpoint
self.healh_check_route = healh_check_route
self.healh_check_string = healh_check_string
if self.__service_is_not__available():
raise ServiceNotAvailable("Service not Working")
def __service_is_not__available(self,):
healh_check_endpoint = self.endpoint + self.healh_check_route
response = requests.get(healh_check_endpoint)
return not response.content == self.healh_check_string
def run(self, **kwargs):
response = requests.post(self.endpoint, json=kwargs)
return response.json()
|
5d0fb37bdb42833b0468c094a3e310015c68be3f | debugtools/__init__.py | debugtools/__init__.py | VERSION = (1, 1, 0)
| VERSION = (1, 1, 0)
# Make sure the ``{% print %}`` is always available, even without a {% load debug_tags %} call.
# **NOTE** this uses the undocumented, unofficial add_to_builtins() call. It's not promoted
# by Django developers because it's better to be explicit with a {% load .. %} in the templates.
#
# This function is used here nevertheless because the {% print %} tag is a debugging aid,
# and not a tag that should remain permanently in your templates. Convenience is preferred here.
#
from django.template.loader import add_to_builtins
add_to_builtins("debugtools.templatetags.debug_tags")
| Make sure {% print %} is always available for you in templates | Make sure {% print %} is always available for you in templates
| Python | apache-2.0 | edoburu/django-debugtools,edoburu/django-debugtools,edoburu/django-debugtools | VERSION = (1, 1, 0)
Make sure {% print %} is always available for you in templates | VERSION = (1, 1, 0)
# Make sure the ``{% print %}`` is always available, even without a {% load debug_tags %} call.
# **NOTE** this uses the undocumented, unofficial add_to_builtins() call. It's not promoted
# by Django developers because it's better to be explicit with a {% load .. %} in the templates.
#
# This function is used here nevertheless because the {% print %} tag is a debugging aid,
# and not a tag that should remain permanently in your templates. Convenience is preferred here.
#
from django.template.loader import add_to_builtins
add_to_builtins("debugtools.templatetags.debug_tags")
| <commit_before>VERSION = (1, 1, 0)
<commit_msg>Make sure {% print %} is always available for you in templates<commit_after> | VERSION = (1, 1, 0)
# Make sure the ``{% print %}`` is always available, even without a {% load debug_tags %} call.
# **NOTE** this uses the undocumented, unofficial add_to_builtins() call. It's not promoted
# by Django developers because it's better to be explicit with a {% load .. %} in the templates.
#
# This function is used here nevertheless because the {% print %} tag is a debugging aid,
# and not a tag that should remain permanently in your templates. Convenience is preferred here.
#
from django.template.loader import add_to_builtins
add_to_builtins("debugtools.templatetags.debug_tags")
| VERSION = (1, 1, 0)
Make sure {% print %} is always available for you in templatesVERSION = (1, 1, 0)
# Make sure the ``{% print %}`` is always available, even without a {% load debug_tags %} call.
# **NOTE** this uses the undocumented, unofficial add_to_builtins() call. It's not promoted
# by Django developers because it's better to be explicit with a {% load .. %} in the templates.
#
# This function is used here nevertheless because the {% print %} tag is a debugging aid,
# and not a tag that should remain permanently in your templates. Convenience is preferred here.
#
from django.template.loader import add_to_builtins
add_to_builtins("debugtools.templatetags.debug_tags")
| <commit_before>VERSION = (1, 1, 0)
<commit_msg>Make sure {% print %} is always available for you in templates<commit_after>VERSION = (1, 1, 0)
# Make sure the ``{% print %}`` is always available, even without a {% load debug_tags %} call.
# **NOTE** this uses the undocumented, unofficial add_to_builtins() call. It's not promoted
# by Django developers because it's better to be explicit with a {% load .. %} in the templates.
#
# This function is used here nevertheless because the {% print %} tag is a debugging aid,
# and not a tag that should remain permanently in your templates. Convenience is preferred here.
#
from django.template.loader import add_to_builtins
add_to_builtins("debugtools.templatetags.debug_tags")
|
2ef671ca19f237ab0bf3fcc632048b34a2c5d3dc | tutorials/models.py | tutorials/models.py | from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
| from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
| Add options for choices fields, Add new fields to Tutorial model | Add options for choices fields, Add new fields to Tutorial model
| Python | agpl-3.0 | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
Add options for choices fields, Add new fields to Tutorial model | from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
| <commit_before>from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
<commit_msg>Add options for choices fields, Add new fields to Tutorial model<commit_after> | from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
| from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
Add options for choices fields, Add new fields to Tutorial modelfrom django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
| <commit_before>from django.db import models
from markdownx.models import MarkdownxField
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
# Category = models.TextField()
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
# Level = models.IntegerField()
<commit_msg>Add options for choices fields, Add new fields to Tutorial model<commit_after>from django.db import models
from django.urls import reverse
from markdownx.models import MarkdownxField
# add options if needed
CATEGORY_OPTIONS = [('io', 'I/O'), ('intro', 'Introduction')]
LEVEL_OPTIONS = [(1, '1'), (2, '2'), (3, '3')]
# Create your models here.
class Tutorial(models.Model):
# ToDo: Fields that are out-commented are missing according to the mockup -> datamodel ??
category = models.CharField(max_length=15, choices=CATEGORY_OPTIONS, blank=True)
title = models.TextField()
html = models.TextField()
markdown = MarkdownxField()
level = models.IntegerField(choices=LEVEL_OPTIONS, null=True)
def get_absolute_url (self):
return reverse('detail_tutorial', args=[self.id])
|
141a75f910d239aa7a593f9edda53fd49a02f8c4 | digitalmanifesto/views.py | digitalmanifesto/views.py | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join(key, LETSENCRYPT_SECRET)
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join((key, LETSENCRYPT_SECRET))
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' | Fix typo in acme_challenge view (str.join() takes a single iterable, not *args) | Fix typo in acme_challenge view (str.join() takes a single iterable, not *args)
| Python | mit | gwhigs/digital-manifesto,gwhigs/digital-manifesto,gwhigs/digital-manifesto,gwhigs/digital-manifesto | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join(key, LETSENCRYPT_SECRET)
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html'Fix typo in acme_challenge view (str.join() takes a single iterable, not *args) | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join((key, LETSENCRYPT_SECRET))
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' | <commit_before>from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join(key, LETSENCRYPT_SECRET)
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html'<commit_msg>Fix typo in acme_challenge view (str.join() takes a single iterable, not *args)<commit_after> | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join((key, LETSENCRYPT_SECRET))
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' | from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join(key, LETSENCRYPT_SECRET)
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html'Fix typo in acme_challenge view (str.join() takes a single iterable, not *args)from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join((key, LETSENCRYPT_SECRET))
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' | <commit_before>from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join(key, LETSENCRYPT_SECRET)
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html'<commit_msg>Fix typo in acme_challenge view (str.join() takes a single iterable, not *args)<commit_after>from __future__ import absolute_import, unicode_literals
from django.http import HttpResponse
from django.views.generic import TemplateView
from manifestos.models import Manifesto, Collection
LETSENCRYPT_SECRET = 'RoqK1ZHN6384upsmMKbrJuxqaGNKcmJc5JApOy8qi8Y'
def acme_challenge(request, key):
resp = '.'.join((key, LETSENCRYPT_SECRET))
return HttpResponse(resp)
class IndexView(TemplateView):
template_name = 'index.html'
def get_context_data(self, **kwargs):
context = super(IndexView, self).get_context_data(**kwargs)
context['collection'] = Collection.objects.get(featured=True)
return context
class AboutTemplateView(TemplateView):
template_name = 'about.html'
class ContactTemplateView(TemplateView):
template_name = 'contact.html'
class NewsTemplateView(TemplateView):
template_name = 'news.html'
class ProjectsTemplateView(TemplateView):
template_name = 'projects_we_like.html'
class ResourcesTemplateView(TemplateView):
template_name = 'resources.html' |
0524817b152b4e3211d5d8101c661a54578e5888 | dmoj/checkers/standard.py | dmoj/checkers/standard.py | def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
| from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
| Remove untested checker code path | Remove untested checker code path | Python | agpl-3.0 | DMOJ/judge,DMOJ/judge,DMOJ/judge | def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
Remove untested checker code path | from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
| <commit_before>def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
<commit_msg>Remove untested checker code path<commit_after> | from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
| def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
Remove untested checker code pathfrom ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
| <commit_before>def check(process_output, judge_output, **kwargs):
from six.moves import zip
process_lines = list(filter(None, process_output.split(b'\n')))
judge_lines = list(filter(None, judge_output.split(b'\n')))
if len(process_lines) != len(judge_lines):
return False
for process_line, judge_line in zip(process_lines, judge_lines):
if process_line.split() != judge_line.split():
return False
return True
try:
from ._checker import standard
except ImportError as e:
pass
else:
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
<commit_msg>Remove untested checker code path<commit_after>from ._checker import standard
def check(process_output, judge_output, _checker=standard, **kwargs):
return _checker(judge_output, process_output)
del standard
|
e98a098ac6a21b0192771fd3a8d5c48468cd4340 | pymatgen/phasediagram/__init__.py | pymatgen/phasediagram/__init__.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
| # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
from .maker import PhaseDiagram, GrandPotentialPhaseDiagram, CompoundPhaseDiagram
from .analyzer import PDAnalyzer
from .plotter import PDPlotter | Add quick aliases to PD. | Add quick aliases to PD.
Former-commit-id: 6a0680d54cc1d391a351f4d5e8ff72f696d303db [formerly 5fe981c7ed92d45548d3f7ab6abb38d149d0ada2]
Former-commit-id: f76e0dc538c182b4978eb54b51cbebafa257ce04 | Python | mit | aykol/pymatgen,tschaume/pymatgen,Bismarrck/pymatgen,setten/pymatgen,fraricci/pymatgen,Bismarrck/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,gpetretto/pymatgen,tallakahath/pymatgen,gpetretto/pymatgen,gVallverdu/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen,setten/pymatgen,johnson1228/pymatgen,blondegeek/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,blondegeek/pymatgen,setten/pymatgen,gpetretto/pymatgen,gmatteo/pymatgen,Bismarrck/pymatgen,richardtran415/pymatgen,montoyjh/pymatgen,setten/pymatgen,mbkumar/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,blondegeek/pymatgen,davidwaroquiers/pymatgen,ndardenne/pymatgen,tschaume/pymatgen,czhengsci/pymatgen,gpetretto/pymatgen,mbkumar/pymatgen,Bismarrck/pymatgen,xhqu1981/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,tallakahath/pymatgen,johnson1228/pymatgen,richardtran415/pymatgen,dongsenfo/pymatgen,montoyjh/pymatgen,johnson1228/pymatgen,czhengsci/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,czhengsci/pymatgen,aykol/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,tschaume/pymatgen,gVallverdu/pymatgen,dongsenfo/pymatgen,Bismarrck/pymatgen,dongsenfo/pymatgen,matk86/pymatgen,czhengsci/pymatgen,matk86/pymatgen,nisse3000/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,gVallverdu/pymatgen,xhqu1981/pymatgen,fraricci/pymatgen,aykol/pymatgen,matk86/pymatgen,gmatteo/pymatgen,vorwerkc/pymatgen,nisse3000/pymatgen,mbkumar/pymatgen,xhqu1981/pymatgen,nisse3000/pymatgen,montoyjh/pymatgen,fraricci/pymatgen,tallakahath/pymatgen,nisse3000/pymatgen,blondegeek/pymatgen | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
Add quick aliases to PD.
Former-commit-id: 6a0680d54cc1d391a351f4d5e8ff72f696d303db [formerly 5fe981c7ed92d45548d3f7ab6abb38d149d0ada2]
Former-commit-id: f76e0dc538c182b4978eb54b51cbebafa257ce04 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
from .maker import PhaseDiagram, GrandPotentialPhaseDiagram, CompoundPhaseDiagram
from .analyzer import PDAnalyzer
from .plotter import PDPlotter | <commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
<commit_msg>Add quick aliases to PD.
Former-commit-id: 6a0680d54cc1d391a351f4d5e8ff72f696d303db [formerly 5fe981c7ed92d45548d3f7ab6abb38d149d0ada2]
Former-commit-id: f76e0dc538c182b4978eb54b51cbebafa257ce04<commit_after> | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
from .maker import PhaseDiagram, GrandPotentialPhaseDiagram, CompoundPhaseDiagram
from .analyzer import PDAnalyzer
from .plotter import PDPlotter | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
Add quick aliases to PD.
Former-commit-id: 6a0680d54cc1d391a351f4d5e8ff72f696d303db [formerly 5fe981c7ed92d45548d3f7ab6abb38d149d0ada2]
Former-commit-id: f76e0dc538c182b4978eb54b51cbebafa257ce04# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
from .maker import PhaseDiagram, GrandPotentialPhaseDiagram, CompoundPhaseDiagram
from .analyzer import PDAnalyzer
from .plotter import PDPlotter | <commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
<commit_msg>Add quick aliases to PD.
Former-commit-id: 6a0680d54cc1d391a351f4d5e8ff72f696d303db [formerly 5fe981c7ed92d45548d3f7ab6abb38d149d0ada2]
Former-commit-id: f76e0dc538c182b4978eb54b51cbebafa257ce04<commit_after># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
The phasediagram package implements the analysis tools to perform phase
stability analyses, including the constructing of phase diagrams, determination
of decomposition products, etc. The package is designed to be fairly modular
and standalone.
"""
__author__ = "Shyue"
__date__ = "Mar 28 2013"
from .maker import PhaseDiagram, GrandPotentialPhaseDiagram, CompoundPhaseDiagram
from .analyzer import PDAnalyzer
from .plotter import PDPlotter |
4f7e991960c24fc9548f8f3d6d5f8967c2ece84a | numpy/array_api/_typing.py | numpy/array_api/_typing.py | """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| Add a missing subscription slot to `NestedSequence` | MAINT: Add a missing subscription slot to `NestedSequence`
| Python | bsd-3-clause | numpy/numpy,anntzer/numpy,numpy/numpy,pdebuyl/numpy,seberg/numpy,rgommers/numpy,anntzer/numpy,charris/numpy,charris/numpy,jakirkham/numpy,charris/numpy,jakirkham/numpy,pdebuyl/numpy,anntzer/numpy,mhvk/numpy,endolith/numpy,mattip/numpy,mattip/numpy,rgommers/numpy,numpy/numpy,mhvk/numpy,mhvk/numpy,rgommers/numpy,charris/numpy,jakirkham/numpy,seberg/numpy,mhvk/numpy,endolith/numpy,endolith/numpy,numpy/numpy,mhvk/numpy,pdebuyl/numpy,seberg/numpy,jakirkham/numpy,rgommers/numpy,mattip/numpy,jakirkham/numpy,endolith/numpy,anntzer/numpy,mattip/numpy,pdebuyl/numpy,seberg/numpy | """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
MAINT: Add a missing subscription slot to `NestedSequence` | """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| <commit_before>"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
<commit_msg>MAINT: Add a missing subscription slot to `NestedSequence`<commit_after> | """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| """
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
MAINT: Add a missing subscription slot to `NestedSequence`"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
| <commit_before>"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
NestedSequence = Sequence[Sequence[Any]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
<commit_msg>MAINT: Add a missing subscription slot to `NestedSequence`<commit_after>"""
This file defines the types for type annotations.
These names aren't part of the module namespace, but they are used in the
annotations in the function signatures. The functions in the module are only
valid for inputs that match the given type annotations.
"""
__all__ = [
"Array",
"Device",
"Dtype",
"SupportsDLPack",
"SupportsBufferProtocol",
"PyCapsule",
]
import sys
from typing import Any, Literal, Sequence, Type, Union, TYPE_CHECKING, TypeVar
from . import Array
from numpy import (
dtype,
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
)
# This should really be recursive, but that isn't supported yet. See the
# similar comment in numpy/typing/_array_like.py
_T = TypeVar("_T")
NestedSequence = Sequence[Sequence[_T]]
Device = Literal["cpu"]
if TYPE_CHECKING or sys.version_info >= (3, 9):
Dtype = dtype[Union[
int8,
int16,
int32,
int64,
uint8,
uint16,
uint32,
uint64,
float32,
float64,
]]
else:
Dtype = dtype
SupportsDLPack = Any
SupportsBufferProtocol = Any
PyCapsule = Any
|
5996401b8d7313437abfc9a52b503e8357927641 | openupgradelib/__init__.py | openupgradelib/__init__.py | # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.0.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
| # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.1.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
| Bump version for next release | Bump version for next release
| Python | agpl-3.0 | kurkop/openupgradelib,hbrunn/openupgradelib,StefanRijnhart/openupgradelib,bwrsandman/openupgradelib,sebalix/openupgradelib,OCA/openupgradelib,blaggacao/openupgradelib | # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.0.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
Bump version for next release | # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.1.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
| <commit_before># -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.0.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
<commit_msg>Bump version for next release<commit_after> | # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.1.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
| # -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.0.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
Bump version for next release# -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.1.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
| <commit_before># -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.0.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
<commit_msg>Bump version for next release<commit_after># -*- coding: utf-8 -*-
__author__ = 'Odoo Community Association (OCA)'
__email__ = 'support@odoo-community.org'
__version__ = '1.1.0'
__doc__ = """A library with support functions to be called from Odoo \
migration scripts."""
__license__ = "AGPL-3"
|
226a4c1af180f0bf1924a84c76d1d2b300557e9b | instana/instrumentation/urllib3.py | instana/instrumentation/urllib3.py | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
| from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| Make sure to finish span when there is an exception | Make sure to finish span when there is an exception
| Python | mit | instana/python-sensor,instana/python-sensor | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
Make sure to finish span when there is an exception | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| <commit_before>from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
<commit_msg>Make sure to finish span when there is an exception<commit_after> | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
Make sure to finish span when there is an exceptionfrom __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| <commit_before>from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
<commit_msg>Make sure to finish span when there is an exception<commit_after>from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
|
9b8fb2d745af7bb8c69e504a97612727b7e9a2d8 | storlets/gateway/gateways/base.py | storlets/gateway/gateways/base.py | # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq):
raise NotImplementedError("Not implemented: invocation_flow")
| # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq, extra_sources=None):
raise NotImplementedError("Not implemented: invocation_flow")
| Use consistent arguments for invocation_flow method | Use consistent arguments for invocation_flow method
Trivial-Fix
Change-Id: I6481147a722b2f366300f2ac7952ebd94f1a2ada
| Python | apache-2.0 | openstack/storlets,openstack/storlets,openstack/storlets,openstack/storlets | # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq):
raise NotImplementedError("Not implemented: invocation_flow")
Use consistent arguments for invocation_flow method
Trivial-Fix
Change-Id: I6481147a722b2f366300f2ac7952ebd94f1a2ada | # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq, extra_sources=None):
raise NotImplementedError("Not implemented: invocation_flow")
| <commit_before># Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq):
raise NotImplementedError("Not implemented: invocation_flow")
<commit_msg>Use consistent arguments for invocation_flow method
Trivial-Fix
Change-Id: I6481147a722b2f366300f2ac7952ebd94f1a2ada<commit_after> | # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq, extra_sources=None):
raise NotImplementedError("Not implemented: invocation_flow")
| # Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq):
raise NotImplementedError("Not implemented: invocation_flow")
Use consistent arguments for invocation_flow method
Trivial-Fix
Change-Id: I6481147a722b2f366300f2ac7952ebd94f1a2ada# Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq, extra_sources=None):
raise NotImplementedError("Not implemented: invocation_flow")
| <commit_before># Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq):
raise NotImplementedError("Not implemented: invocation_flow")
<commit_msg>Use consistent arguments for invocation_flow method
Trivial-Fix
Change-Id: I6481147a722b2f366300f2ac7952ebd94f1a2ada<commit_after># Copyright IBM Corp. 2015, 2015 All Rights Reserved
# Copyright (c) 2010-2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from storlets.gateway.common.stob import StorletRequest
class StorletGatewayBase(object):
request_class = StorletRequest
def __init__(self, conf, logger, scope):
self.logger = logger
self.conf = conf
self.scope = scope
@classmethod
def validate_storlet_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_storlet_registration")
@classmethod
def validate_dependency_registration(cls, params, obj):
raise NotImplementedError("Not implemented: "
"validate_dependency_registration")
def invocation_flow(self, sreq, extra_sources=None):
raise NotImplementedError("Not implemented: invocation_flow")
|
97d2b5b55a6cec3644a323662e52b9b256c18f33 | mdx_linkify/mdx_linkify.py | mdx_linkify/mdx_linkify.py | from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
| from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
| Fix IndexError: pop from empty list | fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15
| Python | mit | daGrevis/mdx_linkify | from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15 | from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
| <commit_before>from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
<commit_msg>fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15<commit_after> | from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
| from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
| <commit_before>from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker = Linker(**linker_options)
def run(self, text):
return self._linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
<commit_msg>fix: Fix IndexError: pop from empty list
Create Linker instance for each run, to bypass html5lib bugs
Refs #15<commit_after>from bleach.linkifier import Linker
from markdown.postprocessors import Postprocessor
from markdown.extensions import Extension
class LinkifyExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'linker_options': [{}, 'Options for bleach.linkifier.Linker'],
}
super(LinkifyExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.postprocessors.register(
LinkifyPostprocessor(
md,
self.getConfig('linker_options'),
),
"linkify",
50,
)
class LinkifyPostprocessor(Postprocessor):
def __init__(self, md, linker_options):
super(LinkifyPostprocessor, self).__init__(md)
linker_options.setdefault("skip_tags", ["code"])
self._linker_options = linker_options
def run(self, text):
linker = Linker(**self._linker_options)
return linker.linkify(text)
def makeExtension(*args, **kwargs):
return LinkifyExtension(*args, **kwargs)
|
04ac8f763277014a95c9b5559cab6abc11ad9390 | test/integration/test_qt.py | test/integration/test_qt.py | from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'QXcbConnection: Could not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
| from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'[Cc]ould not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
| Fix integration tests on Mint 20 | Fix integration tests on Mint 20
| Python | bsd-3-clause | jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000,jimporter/bfg9000 | from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'QXcbConnection: Could not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
Fix integration tests on Mint 20 | from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'[Cc]ould not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
| <commit_before>from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'QXcbConnection: Could not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
<commit_msg>Fix integration tests on Mint 20<commit_after> | from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'[Cc]ould not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
| from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'QXcbConnection: Could not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
Fix integration tests on Mint 20from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'[Cc]ould not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
| <commit_before>from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'QXcbConnection: Could not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
<commit_msg>Fix integration tests on Mint 20<commit_after>from . import *
# MSBuild backend doesn't support generated_source yet.
@skip_if_backend('msbuild')
class TestQt(IntegrationTest):
def run_executable(self, exe):
if env.host_platform.genus == 'linux':
output = self.assertPopen([exe], extra_env={'DISPLAY': ''},
returncode='fail')
self.assertRegex(output,
r'[Cc]ould not connect to display')
def __init__(self, *args, **kwargs):
env_vars = ({} if env.builder('c++').flavor == 'msvc'
else {'CPPFLAGS': ('-Wno-inconsistent-missing-override ' +
env.getvar('CPPFLAGS', ''))})
super().__init__(os.path.join(examples_dir, '13_qt'),
*args, extra_env=env_vars, **kwargs)
def test_designer(self):
self.build(executable('qt-designer'))
self.run_executable(executable('qt-designer'))
def test_qml(self):
self.build(executable('qt-qml'))
self.run_executable(executable('qt-qml'))
|
b5676fde38b1d7c40bc5873a65e864c3bd3214d9 | heufybot/utils/logutils.py | heufybot/utils/logutils.py | from twisted.python import log, util
import logging
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
| from twisted.python import log, util
import logging, sys, traceback
def logExceptionTrace(error = None):
if error:
log.msg("A Python excecution error occurred:", error, level=logging.ERROR)
log.msg(traceback.format_exc(sys.exc_info()[2]), level=logging.ERROR)
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
| Add a helper function for exception logging | Add a helper function for exception logging
| Python | mit | Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot | from twisted.python import log, util
import logging
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
Add a helper function for exception logging | from twisted.python import log, util
import logging, sys, traceback
def logExceptionTrace(error = None):
if error:
log.msg("A Python excecution error occurred:", error, level=logging.ERROR)
log.msg(traceback.format_exc(sys.exc_info()[2]), level=logging.ERROR)
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
| <commit_before>from twisted.python import log, util
import logging
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
<commit_msg>Add a helper function for exception logging<commit_after> | from twisted.python import log, util
import logging, sys, traceback
def logExceptionTrace(error = None):
if error:
log.msg("A Python excecution error occurred:", error, level=logging.ERROR)
log.msg(traceback.format_exc(sys.exc_info()[2]), level=logging.ERROR)
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
| from twisted.python import log, util
import logging
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
Add a helper function for exception loggingfrom twisted.python import log, util
import logging, sys, traceback
def logExceptionTrace(error = None):
if error:
log.msg("A Python excecution error occurred:", error, level=logging.ERROR)
log.msg(traceback.format_exc(sys.exc_info()[2]), level=logging.ERROR)
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
| <commit_before>from twisted.python import log, util
import logging
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
<commit_msg>Add a helper function for exception logging<commit_after>from twisted.python import log, util
import logging, sys, traceback
def logExceptionTrace(error = None):
if error:
log.msg("A Python excecution error occurred:", error, level=logging.ERROR)
log.msg(traceback.format_exc(sys.exc_info()[2]), level=logging.ERROR)
class LevelLoggingObserver(log.FileLogObserver):
def __init__(self, logfile, logLevel):
log.FileLogObserver.__init__(self, logfile)
self.logLevel = logLevel
def __call__(self, eventDict):
self.emit(eventDict)
def emit(self, eventDict):
if eventDict["isError"]:
level = logging.ERROR
elif "level" in eventDict:
level = eventDict["level"]
else:
level = logging.INFO
if level < self.logLevel:
return
message = log.textFromEventDict(eventDict)
if not message:
return
logElements = {
"timestamp": self.formatTime(eventDict["time"]),
"level": logging.getLevelName(level),
"system": eventDict["system"],
"text": message.replace("\n", "\n\t")
}
messageString = "{} {}".format(logElements["timestamp"],
log._safeFormat("%(level)7s:[%(system)s]: %(text)s\n", logElements))
print messageString.replace("\n", "")
util.untilConcludes(self.write, messageString)
util.untilConcludes(self.flush)
|
7e2e1c45ea9f83f11dc5d3b828a36e13825bcd8b | tests/credentials.py | tests/credentials.py | import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if 'TRAVIS' in os.environ:
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
| import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if os.environ.get('TRAVIS'):
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
| Test for Travis CI variable a little less recklessly. | Test for Travis CI variable a little less recklessly.
| Python | mit | dssg/tweedr,dssg/tweedr,dssg/tweedr,dssg/tweedr | import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if 'TRAVIS' in os.environ:
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
Test for Travis CI variable a little less recklessly. | import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if os.environ.get('TRAVIS'):
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
| <commit_before>import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if 'TRAVIS' in os.environ:
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
<commit_msg>Test for Travis CI variable a little less recklessly.<commit_after> | import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if os.environ.get('TRAVIS'):
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
| import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if 'TRAVIS' in os.environ:
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
Test for Travis CI variable a little less recklessly.import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if os.environ.get('TRAVIS'):
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
| <commit_before>import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if 'TRAVIS' in os.environ:
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
<commit_msg>Test for Travis CI variable a little less recklessly.<commit_after>import os
import unittest
import tweedr
class TestCredentials(unittest.TestCase):
def test_mysql(self):
if os.environ.get('TRAVIS'):
print 'For obvious reasons, Travis CI cannot run this test.'
else:
names = ['MYSQL_PASS', 'MYSQL_HOST']
values = [os.environ[name] for name in names]
for base, directories, filenames in os.walk(tweedr.root):
for filename in filenames:
filepath = os.path.join(base, filename)
with open(filepath) as fp:
contents = fp.read()
for value in values:
# assertNotIn(first, second, msg=None)
# Test that first is (or is not) in second.
self.assertNotIn(value, contents, 'Found a blacklisted credential (%s) in %s' % (value, filepath))
|
3b97e2eafaf8e2cdc2b39024f125c284a5f9de23 | tests/python-test-library/testcases/conf.py | tests/python-test-library/testcases/conf.py | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
testBusName = "org.freedesktop.context.testing"
testRqstPath = "/org/freedesktop/context/testing/request"
testRqstIfce = "org.freedesktop.context.testing.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
scriberBusName = "org.freedesktop.context.testing.subHandler"
scriberHandlerPath = "/org/freedesktop/context/testing/subHandler/request"
scriberHandlerIfce = "org.freedesktop.context.testing.subHandler.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] | Modify subscription handler interface name | Modify subscription handler interface name
| Python | lgpl-2.1 | rburchell/ck,rburchell/ck,rburchell/ck,rburchell/ck,rburchell/ck | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
testBusName = "org.freedesktop.context.testing"
testRqstPath = "/org/freedesktop/context/testing/request"
testRqstIfce = "org.freedesktop.context.testing.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp']Modify subscription handler interface name | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
scriberBusName = "org.freedesktop.context.testing.subHandler"
scriberHandlerPath = "/org/freedesktop/context/testing/subHandler/request"
scriberHandlerIfce = "org.freedesktop.context.testing.subHandler.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] | <commit_before>#!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
testBusName = "org.freedesktop.context.testing"
testRqstPath = "/org/freedesktop/context/testing/request"
testRqstIfce = "org.freedesktop.context.testing.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp']<commit_msg>Modify subscription handler interface name<commit_after> | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
scriberBusName = "org.freedesktop.context.testing.subHandler"
scriberHandlerPath = "/org/freedesktop/context/testing/subHandler/request"
scriberHandlerIfce = "org.freedesktop.context.testing.subHandler.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] | #!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
testBusName = "org.freedesktop.context.testing"
testRqstPath = "/org/freedesktop/context/testing/request"
testRqstIfce = "org.freedesktop.context.testing.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp']Modify subscription handler interface name#!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
scriberBusName = "org.freedesktop.context.testing.subHandler"
scriberHandlerPath = "/org/freedesktop/context/testing/subHandler/request"
scriberHandlerIfce = "org.freedesktop.context.testing.subHandler.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] | <commit_before>#!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
testBusName = "org.freedesktop.context.testing"
testRqstPath = "/org/freedesktop/context/testing/request"
testRqstIfce = "org.freedesktop.context.testing.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp']<commit_msg>Modify subscription handler interface name<commit_after>#!/usr/bin/env python2.5
##
## @file contextOrientationTCs.py
##
## Copyright (C) 2008 Nokia. All rights reserved.
##
##
##
##
## Requires python2.5-gobject and python2.5-dbus
##
## Implements also some testing API:
##
##
contextSrcPath="."
sessionConfigPath="tests/python-test-library/stubs"
ctxBusName = "org.freedesktop.ContextKit"
ctxMgrPath = "/org/freedesktop/ContextKit/Manager"
ctxMgrIfce = "org.freedesktop.ContextKit.Manager"
ctxScriberIfce = "org.freedesktop.ContextKit.Subscriber"
mceBusName = "com.nokia.mce"
mceRqstPath = "/com/nokia/mce/request"
mceRqstIfce = "com.nokia.mce.request"
scriberBusName = "org.freedesktop.context.testing.subHandler"
scriberHandlerPath = "/org/freedesktop/context/testing/subHandler/request"
scriberHandlerIfce = "org.freedesktop.context.testing.subHandler.request"
scriberOnePath = "/org/freedesktop/ContextKit/Subscribers/0"
scriberTwoPath = "/org/freedesktop/ContextKit/Subscribers/1"
scriberThirdPath = "/org/freedesktop/ContextKit/Subscribers/2"
properties = ['Context.Device.Orientation.facingUp','Context.Device.Orientation.edgeUp'] |
2a779fedbb533eda7a8856e6a543f09faae9ac85 | thinc/neural/tests/unit/Model/test_setup.py | thinc/neural/tests/unit/Model/test_setup.py | # encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import base
@pytest.mark.parametrize('new_name', ['mymodel', 'layer', 'basic', '', '漢字'])
def test_name_override(new_name):
control = base.Model()
assert control.name == 'model'
model = base.Model(name=new_name)
assert model.name == new_name
assert model.name != 'model'
control = base.Model()
assert control.name == 'model'
@pytest.mark.parametrize('new_device', ['gpu', 'gpu1', 'foreign'])
def test_device_override(new_device):
control = base.Model()
assert control.device == 'cpu'
model = base.Model(device=new_device)
assert model.device == new_device
assert model.device != 'cpu'
control = base.Model()
assert control.device == 'cpu'
def test_add_child_layer_instances():
control = base.Model()
assert len(control.layers) == 0
model = base.Model(None, None,
layers=(base.Model(name='child1'), base.Model(name='child2')))
assert len(model.layers) == 2
assert model.layers[0].name == 'child1'
assert model.layers[1].name == 'child2'
assert model.name == 'model'
assert len(model.layers[0].layers) == 0
| from .... import base
| Remove tests or removed Model.setup method | Remove tests or removed Model.setup method
| Python | mit | explosion/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,spacy-io/thinc | # encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import base
@pytest.mark.parametrize('new_name', ['mymodel', 'layer', 'basic', '', '漢字'])
def test_name_override(new_name):
control = base.Model()
assert control.name == 'model'
model = base.Model(name=new_name)
assert model.name == new_name
assert model.name != 'model'
control = base.Model()
assert control.name == 'model'
@pytest.mark.parametrize('new_device', ['gpu', 'gpu1', 'foreign'])
def test_device_override(new_device):
control = base.Model()
assert control.device == 'cpu'
model = base.Model(device=new_device)
assert model.device == new_device
assert model.device != 'cpu'
control = base.Model()
assert control.device == 'cpu'
def test_add_child_layer_instances():
control = base.Model()
assert len(control.layers) == 0
model = base.Model(None, None,
layers=(base.Model(name='child1'), base.Model(name='child2')))
assert len(model.layers) == 2
assert model.layers[0].name == 'child1'
assert model.layers[1].name == 'child2'
assert model.name == 'model'
assert len(model.layers[0].layers) == 0
Remove tests or removed Model.setup method | from .... import base
| <commit_before># encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import base
@pytest.mark.parametrize('new_name', ['mymodel', 'layer', 'basic', '', '漢字'])
def test_name_override(new_name):
control = base.Model()
assert control.name == 'model'
model = base.Model(name=new_name)
assert model.name == new_name
assert model.name != 'model'
control = base.Model()
assert control.name == 'model'
@pytest.mark.parametrize('new_device', ['gpu', 'gpu1', 'foreign'])
def test_device_override(new_device):
control = base.Model()
assert control.device == 'cpu'
model = base.Model(device=new_device)
assert model.device == new_device
assert model.device != 'cpu'
control = base.Model()
assert control.device == 'cpu'
def test_add_child_layer_instances():
control = base.Model()
assert len(control.layers) == 0
model = base.Model(None, None,
layers=(base.Model(name='child1'), base.Model(name='child2')))
assert len(model.layers) == 2
assert model.layers[0].name == 'child1'
assert model.layers[1].name == 'child2'
assert model.name == 'model'
assert len(model.layers[0].layers) == 0
<commit_msg>Remove tests or removed Model.setup method<commit_after> | from .... import base
| # encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import base
@pytest.mark.parametrize('new_name', ['mymodel', 'layer', 'basic', '', '漢字'])
def test_name_override(new_name):
control = base.Model()
assert control.name == 'model'
model = base.Model(name=new_name)
assert model.name == new_name
assert model.name != 'model'
control = base.Model()
assert control.name == 'model'
@pytest.mark.parametrize('new_device', ['gpu', 'gpu1', 'foreign'])
def test_device_override(new_device):
control = base.Model()
assert control.device == 'cpu'
model = base.Model(device=new_device)
assert model.device == new_device
assert model.device != 'cpu'
control = base.Model()
assert control.device == 'cpu'
def test_add_child_layer_instances():
control = base.Model()
assert len(control.layers) == 0
model = base.Model(None, None,
layers=(base.Model(name='child1'), base.Model(name='child2')))
assert len(model.layers) == 2
assert model.layers[0].name == 'child1'
assert model.layers[1].name == 'child2'
assert model.name == 'model'
assert len(model.layers[0].layers) == 0
Remove tests or removed Model.setup methodfrom .... import base
| <commit_before># encoding: utf8
from __future__ import unicode_literals
import pytest
from flexmock import flexmock
from hypothesis import given, strategies
import abc
from .... import base
@pytest.mark.parametrize('new_name', ['mymodel', 'layer', 'basic', '', '漢字'])
def test_name_override(new_name):
control = base.Model()
assert control.name == 'model'
model = base.Model(name=new_name)
assert model.name == new_name
assert model.name != 'model'
control = base.Model()
assert control.name == 'model'
@pytest.mark.parametrize('new_device', ['gpu', 'gpu1', 'foreign'])
def test_device_override(new_device):
control = base.Model()
assert control.device == 'cpu'
model = base.Model(device=new_device)
assert model.device == new_device
assert model.device != 'cpu'
control = base.Model()
assert control.device == 'cpu'
def test_add_child_layer_instances():
control = base.Model()
assert len(control.layers) == 0
model = base.Model(None, None,
layers=(base.Model(name='child1'), base.Model(name='child2')))
assert len(model.layers) == 2
assert model.layers[0].name == 'child1'
assert model.layers[1].name == 'child2'
assert model.name == 'model'
assert len(model.layers[0].layers) == 0
<commit_msg>Remove tests or removed Model.setup method<commit_after>from .... import base
|
6b9120b5b12da3f32f2c6377905f7ecd103f164e | packages/grid/backend/grid/api/token.py | packages/grid/backend/grid/api/token.py | # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
| # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
guest: bool = False
| UPDATE TokenPayload pydantic model to support guest flag | UPDATE TokenPayload pydantic model to support guest flag
| Python | apache-2.0 | OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft | # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
UPDATE TokenPayload pydantic model to support guest flag | # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
guest: bool = False
| <commit_before># stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
<commit_msg>UPDATE TokenPayload pydantic model to support guest flag<commit_after> | # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
guest: bool = False
| # stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
UPDATE TokenPayload pydantic model to support guest flag# stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
guest: bool = False
| <commit_before># stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
<commit_msg>UPDATE TokenPayload pydantic model to support guest flag<commit_after># stdlib
from typing import Optional
# third party
from pydantic import BaseModel
class Token(BaseModel):
access_token: str
token_type: str
metadata: str
class TokenPayload(BaseModel):
sub: Optional[int] = None
guest: bool = False
|
257addd4403ae17a79c955d9751d5f3072c2a020 | nightreads/emails/views.py | nightreads/emails/views.py | from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
| from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
| Update target count to consider users who subded to `all` | Update target count to consider users who subded to `all`
| Python | mit | avinassh/nightreads,avinassh/nightreads | from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
Update target count to consider users who subded to `all` | from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
| <commit_before>from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
<commit_msg>Update target count to consider users who subded to `all`<commit_after> | from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
| from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
Update target count to consider users who subded to `all`from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
| <commit_before>from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
tags__in=email_obj.tags.all()).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
<commit_msg>Update target count to consider users who subded to `all`<commit_after>from django.shortcuts import render, redirect
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.contrib import messages
from .models import Email
from .forms import EmailAdminForm
from nightreads.user_manager.models import Subscription
class SendEmailAdminView(View):
template = 'admin/emails/email/send_email.html'
form_class = EmailAdminForm
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
return render(request, self.template, {'email_obj': email_obj})
def post(self, request, pk):
email_type = request.POST.get('type', '').lower()
email_obj = Email.objects.get(pk=pk)
if email_type == 'preview':
# send preview email
m = 'Preview email has been sent!'
else:
# send email
m = 'Email has been sent!'
email_obj.is_sent = True
messages.add_message(request, messages.INFO, m)
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
class UpdateTargetCountView(View):
def get(self, request, pk):
email_obj = Email.objects.get(pk=pk)
email_obj.targetted_users = Subscription.objects.filter(
Q(tags__in=email_obj.tags.all()) | Q(tags__name='all')).count()
email_obj.save()
return redirect(reverse(
'admin:emails_email_change', args=(email_obj.id,)))
|
a1318a5ced6efc4ae88abc0b23190daea5899704 | open_humans/serializers.py | open_humans/serializers.py | from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
| from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| Make /api/profile return no private data | Make /api/profile return no private data
| Python | mit | OpenHumans/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,PersonalGenomesOrg/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,OpenHumans/open-humans,PersonalGenomesOrg/open-humans | from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
Make /api/profile return no private data | from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| <commit_before>from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
<commit_msg>Make /api/profile return no private data<commit_after> | from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
Make /api/profile return no private datafrom django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
| <commit_before>from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
url = serializers.SerializerMethodField('get_profile_url')
class Meta:
model = User
fields = ('id', 'url', 'username')
def get_profile_url(self, obj):
return reverse('member_profile', args=(obj.id,))
<commit_msg>Make /api/profile return no private data<commit_after>from django.contrib.auth.models import User
# from django.core.urlresolvers import reverse
from rest_framework import serializers
class ProfileSerializer(serializers.ModelSerializer):
# url = serializers.SerializerMethodField('get_profile_url')
message = serializers.SerializerMethodField('get_message')
class Meta:
model = User
# fields = ('id', 'url', 'username')
fields = ('message',)
# def get_profile_url(self, obj):
# return reverse('member_profile', args=(obj.id,))
def get_message(self, obj):
return 'profiles are not yet implemented'
|
ecdde0ef68a295910039c0a3a4a26f0580fd81f6 | starbowmodweb/news/views.py | starbowmodweb/news/views.py | from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode.render_html(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
| from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode_parser.format(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
| Add support for the [img] tag. | Add support for the [img] tag.
| Python | mit | Starbow/StarbowWebSite,Starbow/StarbowWebSite,Starbow/StarbowWebSite | from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode.render_html(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
Add support for the [img] tag. | from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode_parser.format(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
| <commit_before>from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode.render_html(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
<commit_msg>Add support for the [img] tag.<commit_after> | from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode_parser.format(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
| from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode.render_html(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
Add support for the [img] tag.from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode_parser.format(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
| <commit_before>from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode.render_html(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
<commit_msg>Add support for the [img] tag.<commit_after>from django.http import HttpResponse
from django.shortcuts import render
from django.db import connections
import bbcode
def bbcode_img(tag_name, value, options, parent, context):
if tag_name in options and 'x' in options[tag_name]:
options['width'], options['height'] = options[tag_name].split('x', 1)
del options[tag_name]
attrs = ' '.join([name+'="{}"' for name in options.keys()])
return ('<img src="{}" '+attrs+' />').format(value, *options.values())
bbcode_parser = bbcode.Parser()
bbcode_parser.add_formatter("img", bbcode_img, replace_links=False)
def dictfetchall(cursor):
"Returns all rows from a cursor as a dict"
desc = cursor.description
return [
dict(zip([col[0] for col in desc], row))
for row in cursor.fetchall()
]
def all_news(request):
cursor = connections['mybb'].cursor()
cursor.execute("SELECT * FROM mybb_threads JOIN mybb_posts WHERE firstpost=mybb_posts.pid AND mybb_threads.fid=%s AND mybb_threads.visible=1", [3])
articles = dictfetchall(cursor)
for article in articles:
article['html'] = bbcode_parser.format(article['message'])
return render(request, 'news_listing.html', dict(articles=articles))
|
577fb4b24f681260cd49a3503566fe921e2d252f | compresstest.py | compresstest.py | #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(processitem, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
| #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
| Fix copy/paste error in Gzip open | Fix copy/paste error in Gzip open
| Python | unlicense | tomc603/pycompresstest | #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(processitem, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
Fix copy/paste error in Gzip open | #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
| <commit_before>#!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(processitem, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
<commit_msg>Fix copy/paste error in Gzip open<commit_after> | #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
| #!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(processitem, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
Fix copy/paste error in Gzip open#!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
| <commit_before>#!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(processitem, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
<commit_msg>Fix copy/paste error in Gzip open<commit_after>#!/usr/bin/python
import bz2
import gzip
import optparse
import os
import sys
import time
if __name__ == '__main__':
# Create the option parser, and give a small usage example.
optionparser = optparse.OptionParser(usage='%prog [options] -f /some/file.bz2')
optionparser.add_option('-f', '--file', default='', dest='compfile',
help='Compressed file to decompress')
options, args = optionparser.parse_args()
if not options.compfile:
print('ERROR: You must provide a compressed file!')
sys.exit(1)
try:
print('Decompressing %s' % options.compfile)
st = time.time()
if options.compfile.endswith('.bz2'):
f = bz2.BZ2File(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
elif options.compfile.endswith('.gz'):
f = gzip.GzipFile(options.compfile, 'rb')
f.read()
print(' Deomcpress time: %0.3f' % (time.time() - st))
f.close()
except Exception, e:
print(' * ERROR: ', e)
finally:
print('Done')
|
8ec20a26531abab5e3824cabe36d87a9e761cba0 | testing/skarphed-core/workspace/tests/permissions.py | testing/skarphed-core/workspace/tests/permissions.py | #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(0,1)
def tearDown(self):
CoreTestCase.tearDown(self)
| #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(1,1)
def tearDown(self):
CoreTestCase.tearDown(self)
| Revert "commiting failing dummytest to test CI-setup" | Revert "commiting failing dummytest to test CI-setup"
This reverts commit eaac3ef8430d0a0c02ebaed82e1e8d27889124a6.
| Python | agpl-3.0 | skarphed/skarphed,skarphed/skarphed | #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(0,1)
def tearDown(self):
CoreTestCase.tearDown(self)
Revert "commiting failing dummytest to test CI-setup"
This reverts commit eaac3ef8430d0a0c02ebaed82e1e8d27889124a6. | #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(1,1)
def tearDown(self):
CoreTestCase.tearDown(self)
| <commit_before>#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(0,1)
def tearDown(self):
CoreTestCase.tearDown(self)
<commit_msg>Revert "commiting failing dummytest to test CI-setup"
This reverts commit eaac3ef8430d0a0c02ebaed82e1e8d27889124a6.<commit_after> | #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(1,1)
def tearDown(self):
CoreTestCase.tearDown(self)
| #!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(0,1)
def tearDown(self):
CoreTestCase.tearDown(self)
Revert "commiting failing dummytest to test CI-setup"
This reverts commit eaac3ef8430d0a0c02ebaed82e1e8d27889124a6.#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(1,1)
def tearDown(self):
CoreTestCase.tearDown(self)
| <commit_before>#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(0,1)
def tearDown(self):
CoreTestCase.tearDown(self)
<commit_msg>Revert "commiting failing dummytest to test CI-setup"
This reverts commit eaac3ef8430d0a0c02ebaed82e1e8d27889124a6.<commit_after>#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
from skd_test import CoreTestCase
class TestPermissionFunctions(CoreTestCase):
def setUp(self):
CoreTestCase.setUp(self)
def test_bla(self):
self.assertEqual(1,1)
def tearDown(self):
CoreTestCase.tearDown(self)
|
ea67ca087b06347625f8116e1583fd046a75159a | providers/pt/rcaap/apps.py | providers/pt/rcaap/apps.py | from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
| from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
time_granularity = False
| Remove time granularity from rcaap | Remove time granularity from rcaap
| Python | apache-2.0 | laurenbarker/SHARE,CenterForOpenScience/SHARE,zamattiac/SHARE,aaxelb/SHARE,zamattiac/SHARE,aaxelb/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,laurenbarker/SHARE,zamattiac/SHARE,aaxelb/SHARE | from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
Remove time granularity from rcaap | from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
time_granularity = False
| <commit_before>from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
<commit_msg>Remove time granularity from rcaap<commit_after> | from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
time_granularity = False
| from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
Remove time granularity from rcaapfrom share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
time_granularity = False
| <commit_before>from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
<commit_msg>Remove time granularity from rcaap<commit_after>from share.provider import OAIProviderAppConfig
class AppConfig(OAIProviderAppConfig):
name = 'providers.pt.rcaap'
version = '0.0.1'
title = 'rcaap'
long_title = 'RCAAP - Repositório Científico de Acesso Aberto de Portugal'
home_page = 'http://www.rcaap.pt'
url = 'http://www.rcaap.pt/oai'
approved_sets = ['portugal']
time_granularity = False
|
d0e95cf13290049a95ac3b92e16cfba80b770147 | connector/views.py | connector/views.py | from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
| from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
| Add mime types for jpeg, gif. | Add mime types for jpeg, gif.
| Python | apache-2.0 | acthp/ucsc-xena-client,acthp/ucsc-xena-client,acthp/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client,ucscXena/ucsc-xena-client | from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
Add mime types for jpeg, gif. | from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
| <commit_before>from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
<commit_msg>Add mime types for jpeg, gif.<commit_after> | from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
| from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
Add mime types for jpeg, gif.from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
| <commit_before>from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
<commit_msg>Add mime types for jpeg, gif.<commit_after>from django.conf import settings
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseNotFound
from django.template import Template
from cancer_browser.core.http import HttpResponseSendFile
from django.core.urlresolvers import reverse
import os, re
def client_vars(request, base):
return {
'settings': 'enable',
'jslogging': settings.JSLOGGING,
'ga_id': settings.GA_ID,
'baseurl': base,
}
types = {
'js': 'application/javascript',
'png': 'image/png',
'jpg': 'image/jpeg',
'jpeg': 'image/jpeg',
'gif': 'image/gif',
'css': 'text/css',
'map': 'application/json'
}
def content(request, filename):
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), filename)
ext = os.path.splitext(filename)[1][1:]
if os.path.exists(path):
return HttpResponseSendFile(path, types[ext])
return HttpResponseNotFound()
def drop_last(path):
return re.sub(r"[^/]+/$", "", path)
def page(request):
from django.middleware.csrf import get_token
get_token(request) # force csrf
cvars = client_vars(request, drop_last(reverse(page)))
dirname = os.path.dirname(os.path.realpath(__file__))
t = Template(open(os.path.join(dirname, 'index.html')).read());
c = RequestContext(request, cvars)
return HttpResponse(t.render(c))
|
1e2822bb71c123993419479c2d4f5fc3e80e35bb | reddit_adzerk/adzerkads.py | reddit_adzerk/adzerkads.py | from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| Use analytics name for subreddits if available. | Use analytics name for subreddits if available.
This allows a catch-all for multis to be used.
| Python | bsd-3-clause | madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk | from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
Use analytics name for subreddits if available.
This allows a catch-all for multis to be used. | from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| <commit_before>from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
<commit_msg>Use analytics name for subreddits if available.
This allows a catch-all for multis to be used.<commit_after> | from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
Use analytics name for subreddits if available.
This allows a catch-all for multis to be used.from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| <commit_before>from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
<commit_msg>Use analytics name for subreddits if available.
This allows a catch-all for multis to be used.<commit_after>from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_all_the_things = g.live_config.get("adzerk_all_the_things")
adzerk_srs = g.live_config.get("adzerk_srs")
in_adzerk_sr = adzerk_srs and c.site.name.lower() in adzerk_srs
if adzerk_all_the_things or in_adzerk_sr:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
site_name = getattr(c.site, "analytics_name", c.site.name)
self.ad_url = g.config[url_key].format(
subreddit=quote(site_name.lower()),
origin=c.request_origin,
)
self.frame_id = "ad_main"
|
cb747072b171882bcfceeeb720bba9e7c53c9357 | fm/mathengine/problemTypes/derivPolynomial.py | fm/mathengine/problemTypes/derivPolynomial.py | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial) | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial, x) | Fix bug in polynomial differentiate module | Fix bug in polynomial differentiate module
| Python | mit | stephenjust/fastmath,stephenjust/fastmath,stephenjust/fastmath,stephenjust/fastmath | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial)Fix bug in polynomial differentiate module | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial, x) | <commit_before>import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial)<commit_msg>Fix bug in polynomial differentiate module<commit_after> | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial, x) | import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial)Fix bug in polynomial differentiate moduleimport random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial, x) | <commit_before>import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial)<commit_msg>Fix bug in polynomial differentiate module<commit_after>import random
from sympy import *
import fm.mathengine.problemTypes.problem as problem
class derivPolynomial(problem.Problem):
terms = [1,2,3,4,5]
exponents = [0,1,2,3,4,5,6]
coefficients = [1,2,3,4,5,6,7,8,9]
def generate_problem(self):
nterms = random.choice(self.terms)
nexponents = random.sample(self.exponents, nterms)
polynomial = 0
x = self.x
for i in range(1, nterms):
polynomial += random.choice(self.coefficients) * x ** nexponents.pop(0)
self.problem_statement = Derivative(polynomial, x) |
f0b30432d38cba43d534727b74644e9d29b7264f | Lib/defcon/errors.py | Lib/defcon/errors.py | class DefconError(Exception): pass | class DefconError(Exception):
_report = None
def _set_report(self, value):
self._report = value
def _get_report(self):
return self._report
report = property(_get_report, _set_report)
| Allow DefconError to contain a report. | Allow DefconError to contain a report.
| Python | mit | moyogo/defcon,adrientetar/defcon,anthrotype/defcon,typesupply/defcon | class DefconError(Exception): passAllow DefconError to contain a report. | class DefconError(Exception):
_report = None
def _set_report(self, value):
self._report = value
def _get_report(self):
return self._report
report = property(_get_report, _set_report)
| <commit_before>class DefconError(Exception): pass<commit_msg>Allow DefconError to contain a report.<commit_after> | class DefconError(Exception):
_report = None
def _set_report(self, value):
self._report = value
def _get_report(self):
return self._report
report = property(_get_report, _set_report)
| class DefconError(Exception): passAllow DefconError to contain a report.class DefconError(Exception):
_report = None
def _set_report(self, value):
self._report = value
def _get_report(self):
return self._report
report = property(_get_report, _set_report)
| <commit_before>class DefconError(Exception): pass<commit_msg>Allow DefconError to contain a report.<commit_after>class DefconError(Exception):
_report = None
def _set_report(self, value):
self._report = value
def _get_report(self):
return self._report
report = property(_get_report, _set_report)
|
601a8d665a9bf84f8deea17153ffa9a94267abbf | tests/api_tests/base/test_middleware.py | tests/api_tests/base/test_middleware.py | # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
middleware = TokuTransactionsMiddleware()
mock_response = mock.Mock()
mock_response.status_code = 400
middleware.process_response(mock.Mock(), mock_response)
assert_is(mock_commands.rollback.assert_called_once_with(), None)
| # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
def setUp(self):
super(TestMiddlewareRollback, self).setUp()
self.middleware = TokuTransactionsMiddleware()
self.mock_response = mock.Mock()
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
self.mock_response.status_code = 400
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.rollback.called)
@mock.patch('api.base.middleware.commands')
def test_200_OK_causes_commit(self, mock_commands):
self.mock_response.status_code = 200
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.commit.called)
| Clean up assert statements and add test for successful commit | Clean up assert statements and add test for successful commit
| Python | apache-2.0 | emetsger/osf.io,kwierman/osf.io,abought/osf.io,caneruguz/osf.io,baylee-d/osf.io,GageGaskins/osf.io,mluo613/osf.io,jnayak1/osf.io,sloria/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,kch8qx/osf.io,rdhyee/osf.io,emetsger/osf.io,zamattiac/osf.io,billyhunt/osf.io,mattclark/osf.io,danielneis/osf.io,kch8qx/osf.io,aaxelb/osf.io,cslzchen/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,pattisdr/osf.io,TomBaxter/osf.io,samanehsan/osf.io,baylee-d/osf.io,erinspace/osf.io,danielneis/osf.io,doublebits/osf.io,crcresearch/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,aaxelb/osf.io,mluke93/osf.io,erinspace/osf.io,rdhyee/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,doublebits/osf.io,DanielSBrown/osf.io,mluo613/osf.io,kch8qx/osf.io,wearpants/osf.io,samanehsan/osf.io,KAsante95/osf.io,cosenal/osf.io,ticklemepierce/osf.io,binoculars/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,emetsger/osf.io,acshi/osf.io,aaxelb/osf.io,wearpants/osf.io,cosenal/osf.io,caseyrygt/osf.io,kwierman/osf.io,SSJohns/osf.io,leb2dg/osf.io,haoyuchen1992/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,zachjanicki/osf.io,icereval/osf.io,crcresearch/osf.io,felliott/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,pattisdr/osf.io,acshi/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,icereval/osf.io,hmoco/osf.io,pattisdr/osf.io,SSJohns/osf.io,samanehsan/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,GageGaskins/osf.io,erinspace/osf.io,Nesiehr/osf.io,cslzchen/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,cosenal/osf.io,zachjanicki/osf.io,njantrania/osf.io,Nesiehr/osf.io,kwierman/osf.io,icereval/osf.io,wearpants/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,njantrania/osf.io,abought/osf.io,leb2dg/osf.io,chrisseto/osf.io,jnayak1/osf.io,adlius/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,acshi/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,cos
enal/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,sloria/osf.io,mattclark/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,cslzchen/osf.io,laurenrevere/osf.io,mluke93/osf.io,adlius/osf.io,KAsante95/osf.io,Ghalko/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,alexschiller/osf.io,hmoco/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,caneruguz/osf.io,acshi/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,chennan47/osf.io,emetsger/osf.io,RomanZWang/osf.io,njantrania/osf.io,jnayak1/osf.io,samchrisinger/osf.io,kwierman/osf.io,abought/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,asanfilippo7/osf.io,caseyrollins/osf.io,mluke93/osf.io,binoculars/osf.io,mluo613/osf.io,samanehsan/osf.io,zamattiac/osf.io,mattclark/osf.io,felliott/osf.io,kch8qx/osf.io,Nesiehr/osf.io,felliott/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,asanfilippo7/osf.io,GageGaskins/osf.io,sloria/osf.io,laurenrevere/osf.io,amyshi188/osf.io,zamattiac/osf.io,RomanZWang/osf.io,adlius/osf.io,aaxelb/osf.io,binoculars/osf.io,monikagrabowska/osf.io,rdhyee/osf.io,doublebits/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,caneruguz/osf.io,saradbowman/osf.io,caseyrygt/osf.io,wearpants/osf.io,ticklemepierce/osf.io,hmoco/osf.io,asanfilippo7/osf.io,cwisecarver/osf.io,baylee-d/osf.io,amyshi188/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,chennan47/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,caneruguz/osf.io,Ghalko/osf.io,billyhunt/osf.io,brandonPurvis/osf.io,doublebits/osf.io,acshi/osf.io,chrisseto/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,amyshi188/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,ZobairAlijan/osf.io,Ghalko/osf.io,KAsante95/osf.io,mfraezz/osf.io,haoyuchen1992/osf.io,SSJohns/osf.io,mluke93/osf.io,caseyrygt/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,mluo613/osf.io,abought/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,jnayak1/osf.io,samchrisinger/osf.io,billyhunt/osf.io,SSJohns/osf.io,zachjanicki/osf.io,cas
eyrygt/osf.io,danielneis/osf.io,alexschiller/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,Ghalko/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,TomBaxter/osf.io,njantrania/osf.io,felliott/osf.io,ZobairAlijan/osf.io,adlius/osf.io,chrisseto/osf.io,saradbowman/osf.io,chennan47/osf.io,doublebits/osf.io,chrisseto/osf.io | # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
middleware = TokuTransactionsMiddleware()
mock_response = mock.Mock()
mock_response.status_code = 400
middleware.process_response(mock.Mock(), mock_response)
assert_is(mock_commands.rollback.assert_called_once_with(), None)
Clean up assert statements and add test for successful commit | # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
def setUp(self):
super(TestMiddlewareRollback, self).setUp()
self.middleware = TokuTransactionsMiddleware()
self.mock_response = mock.Mock()
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
self.mock_response.status_code = 400
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.rollback.called)
@mock.patch('api.base.middleware.commands')
def test_200_OK_causes_commit(self, mock_commands):
self.mock_response.status_code = 200
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.commit.called)
| <commit_before># -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
middleware = TokuTransactionsMiddleware()
mock_response = mock.Mock()
mock_response.status_code = 400
middleware.process_response(mock.Mock(), mock_response)
assert_is(mock_commands.rollback.assert_called_once_with(), None)
<commit_msg>Clean up assert statements and add test for successful commit<commit_after> | # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
def setUp(self):
super(TestMiddlewareRollback, self).setUp()
self.middleware = TokuTransactionsMiddleware()
self.mock_response = mock.Mock()
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
self.mock_response.status_code = 400
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.rollback.called)
@mock.patch('api.base.middleware.commands')
def test_200_OK_causes_commit(self, mock_commands):
self.mock_response.status_code = 200
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.commit.called)
| # -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
middleware = TokuTransactionsMiddleware()
mock_response = mock.Mock()
mock_response.status_code = 400
middleware.process_response(mock.Mock(), mock_response)
assert_is(mock_commands.rollback.assert_called_once_with(), None)
Clean up assert statements and add test for successful commit# -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
def setUp(self):
super(TestMiddlewareRollback, self).setUp()
self.middleware = TokuTransactionsMiddleware()
self.mock_response = mock.Mock()
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
self.mock_response.status_code = 400
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.rollback.called)
@mock.patch('api.base.middleware.commands')
def test_200_OK_causes_commit(self, mock_commands):
self.mock_response.status_code = 200
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.commit.called)
| <commit_before># -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
middleware = TokuTransactionsMiddleware()
mock_response = mock.Mock()
mock_response.status_code = 400
middleware.process_response(mock.Mock(), mock_response)
assert_is(mock_commands.rollback.assert_called_once_with(), None)
<commit_msg>Clean up assert statements and add test for successful commit<commit_after># -*- coding: utf-8 -*-
from tests.base import ApiTestCase, fake
import mock
from nose.tools import * # flake8: noqa
from api.base.middleware import TokuTransactionsMiddleware
from tests.base import ApiTestCase
class TestMiddlewareRollback(ApiTestCase):
def setUp(self):
super(TestMiddlewareRollback, self).setUp()
self.middleware = TokuTransactionsMiddleware()
self.mock_response = mock.Mock()
@mock.patch('api.base.middleware.commands')
def test_400_error_causes_rollback(self, mock_commands):
self.mock_response.status_code = 400
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.rollback.called)
@mock.patch('api.base.middleware.commands')
def test_200_OK_causes_commit(self, mock_commands):
self.mock_response.status_code = 200
self.middleware.process_response(mock.Mock(), self.mock_response)
assert_true(mock_commands.commit.called)
|
d09a3dfae995b514b417c07db24fcd8a6e343f1e | migrations/160-products-migration.py | migrations/160-products-migration.py | from django.utils.encoding import smart_str
from kitsune.products.models import Product
from kitsune.taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
| from django.utils.encoding import smart_str
from kitsune.products.models import Product
from taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
| Fix wrong module name in migrations | Fix wrong module name in migrations
| Python | bsd-3-clause | safwanrahman/kitsune,chirilo/kitsune,YOTOV-LIMITED/kitsune,safwanrahman/kitsune,H1ghT0p/kitsune,brittanystoroz/kitsune,mozilla/kitsune,MikkCZ/kitsune,orvi2014/kitsune,anushbmx/kitsune,H1ghT0p/kitsune,safwanrahman/linuxdesh,MikkCZ/kitsune,YOTOV-LIMITED/kitsune,asdofindia/kitsune,MikkCZ/kitsune,MziRintu/kitsune,iDTLabssl/kitsune,turtleloveshoes/kitsune,turtleloveshoes/kitsune,iDTLabssl/kitsune,dbbhattacharya/kitsune,feer56/Kitsune2,brittanystoroz/kitsune,NewPresident1/kitsune,silentbob73/kitsune,Osmose/kitsune,NewPresident1/kitsune,brittanystoroz/kitsune,philipp-sumo/kitsune,H1ghT0p/kitsune,safwanrahman/kitsune,turtleloveshoes/kitsune,safwanrahman/linuxdesh,YOTOV-LIMITED/kitsune,safwanrahman/linuxdesh,iDTLabssl/kitsune,chirilo/kitsune,rlr/kitsune,chirilo/kitsune,feer56/Kitsune2,mozilla/kitsune,rlr/kitsune,NewPresident1/kitsune,orvi2014/kitsune,MziRintu/kitsune,feer56/Kitsune1,silentbob73/kitsune,Osmose/kitsune,dbbhattacharya/kitsune,asdofindia/kitsune,anushbmx/kitsune,anushbmx/kitsune,feer56/Kitsune1,turtleloveshoes/kitsune,orvi2014/kitsune,YOTOV-LIMITED/kitsune,dbbhattacharya/kitsune,dbbhattacharya/kitsune,anushbmx/kitsune,MikkCZ/kitsune,brittanystoroz/kitsune,philipp-sumo/kitsune,asdofindia/kitsune,rlr/kitsune,silentbob73/kitsune,rlr/kitsune,Osmose/kitsune,mozilla/kitsune,iDTLabssl/kitsune,feer56/Kitsune2,H1ghT0p/kitsune,MziRintu/kitsune,silentbob73/kitsune,mythmon/kitsune,mythmon/kitsune,feer56/Kitsune1,chirilo/kitsune,orvi2014/kitsune,Osmose/kitsune,feer56/Kitsune2,philipp-sumo/kitsune,NewPresident1/kitsune,mythmon/kitsune,safwanrahman/kitsune,MziRintu/kitsune,asdofindia/kitsune,mythmon/kitsune,mozilla/kitsune | from django.utils.encoding import smart_str
from kitsune.products.models import Product
from kitsune.taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
Fix wrong module name in migrations | from django.utils.encoding import smart_str
from kitsune.products.models import Product
from taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
| <commit_before>from django.utils.encoding import smart_str
from kitsune.products.models import Product
from kitsune.taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
<commit_msg>Fix wrong module name in migrations<commit_after> | from django.utils.encoding import smart_str
from kitsune.products.models import Product
from taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
| from django.utils.encoding import smart_str
from kitsune.products.models import Product
from kitsune.taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
Fix wrong module name in migrationsfrom django.utils.encoding import smart_str
from kitsune.products.models import Product
from taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
| <commit_before>from django.utils.encoding import smart_str
from kitsune.products.models import Product
from kitsune.taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
<commit_msg>Fix wrong module name in migrations<commit_after>from django.utils.encoding import smart_str
from kitsune.products.models import Product
from taggit.models import Tag
from kitsune.wiki.models import Document
tags_to_migrate = {
# source tag -> product
'firefox': ['firefox'],
'sync': ['firefox', 'mobile'],
'persona': ['firefox'],
'desktop': ['firefox'],
'fxhome': ['firefox', 'mobile'],
'firefox-10': ['firefox'],
'firefox-602': ['firefox'],
'firefox-50': ['firefox'],
'android': ['mobile'],
'mobile': ['mobile']
}
def assert_equals(a, b):
assert a == b, '%s != %s' % (a, b)
def run():
# Get all the tags to migrate.
tags = list(Tag.objects.filter(slug__in=tags_to_migrate.keys()))
total_affected = 0
# For each tag, get the document and add a product for it.
for tag in tags:
for product_slug in tags_to_migrate[tag.slug]:
product = Product.objects.get(slug=product_slug)
# Assign the product to all the documents tagged with tag.
for doc in Document.objects.filter(tags__slug=tag.slug):
doc.products.add(product)
print 'Added product "%s" to document "%s"' % (
smart_str(product.slug), smart_str(doc.title))
total_affected += 1
print 'Done! (%d)' % total_affected
|
e0709e7432cf5acd82a8fe914b50fca6d5cdc735 | scoring_engine/models/__init__.py | scoring_engine/models/__init__.py | from scoring_engine.models.user import User
from scoring_engine.models.service import Service
from scoring_engine.models.account import Account
from scoring_engine.models.property import Property
from scoring_engine.models.team import Team
from scoring_engine.models.check import Check
from scoring_engine.models.round import Round
from scoring_engine.models.kb import KB
from scoring_engine.models.environment import Environment | Revert changes to models init | Revert changes to models init
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | Revert changes to models init | from scoring_engine.models.user import User
from scoring_engine.models.service import Service
from scoring_engine.models.account import Account
from scoring_engine.models.property import Property
from scoring_engine.models.team import Team
from scoring_engine.models.check import Check
from scoring_engine.models.round import Round
from scoring_engine.models.kb import KB
from scoring_engine.models.environment import Environment | <commit_before><commit_msg>Revert changes to models init<commit_after> | from scoring_engine.models.user import User
from scoring_engine.models.service import Service
from scoring_engine.models.account import Account
from scoring_engine.models.property import Property
from scoring_engine.models.team import Team
from scoring_engine.models.check import Check
from scoring_engine.models.round import Round
from scoring_engine.models.kb import KB
from scoring_engine.models.environment import Environment | Revert changes to models initfrom scoring_engine.models.user import User
from scoring_engine.models.service import Service
from scoring_engine.models.account import Account
from scoring_engine.models.property import Property
from scoring_engine.models.team import Team
from scoring_engine.models.check import Check
from scoring_engine.models.round import Round
from scoring_engine.models.kb import KB
from scoring_engine.models.environment import Environment | <commit_before><commit_msg>Revert changes to models init<commit_after>from scoring_engine.models.user import User
from scoring_engine.models.service import Service
from scoring_engine.models.account import Account
from scoring_engine.models.property import Property
from scoring_engine.models.team import Team
from scoring_engine.models.check import Check
from scoring_engine.models.round import Round
from scoring_engine.models.kb import KB
from scoring_engine.models.environment import Environment | |
3c65881633daee8d5b19760e5c887dce25ab69c3 | froide/helper/db_utils.py | froide/helper/db_utils.py | from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
raise
else:
break
| from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
count += 1
else:
raise
else:
break
| Fix bad initial count in slug creation helper | Fix bad initial count in slug creation helper | Python | mit | stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide | from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
raise
else:
break
Fix bad initial count in slug creation helper | from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
count += 1
else:
raise
else:
break
| <commit_before>from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
raise
else:
break
<commit_msg>Fix bad initial count in slug creation helper<commit_after> | from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
count += 1
else:
raise
else:
break
| from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
raise
else:
break
Fix bad initial count in slug creation helperfrom django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
count += 1
else:
raise
else:
break
| <commit_before>from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
first_round = False
count = klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count()
else:
raise
else:
break
<commit_msg>Fix bad initial count in slug creation helper<commit_after>from django.db import IntegrityError
from django.template.defaultfilters import slugify
def save_obj_with_slug(obj, attribute='title', **kwargs):
obj.slug = slugify(getattr(obj, attribute))
return save_obj_unique(obj, 'slug', **kwargs)
def save_obj_unique(obj, attr, count=0, postfix_format='-{count}'):
klass = obj.__class__
MAX_COUNT = 10000 # max 10 thousand loops
base_attr = getattr(obj, attr)
initial_count = count
first_round = count == 0
postfix = ''
while True:
try:
while initial_count - count < MAX_COUNT:
if not first_round:
postfix = postfix_format.format(count=count)
if not klass.objects.filter(**{
attr: getattr(obj, attr) + postfix
}).exists():
break
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
else:
count += 1
setattr(obj, attr, base_attr + postfix)
obj.save()
except IntegrityError:
if count - initial_count < MAX_COUNT:
if first_round:
first_round = False
count = max(
klass.objects.filter(**{
'%s__startswith' % attr: base_attr
}).count(),
initial_count
)
count += 1
else:
raise
else:
break
|
186b3846e5fe9298549b5a5c98e9ec9817f35203 | twisted/plugins/specter_plugin.py | twisted/plugins/specter_plugin.py | from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
["key", None, "specter.key", "SSL key file"],
["cert", None, "specter.crt", "SSL certificate file"]
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(options['config'])),
ssl.DefaultOpenSSLContextFactory(
options['key'],
options['cert']
)
)
serviceMaker = SpecterServiceMaker()
| import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
from zope.interface import implements
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(config)),
ssl.DefaultOpenSSLContextFactory(
config['ssl-key'],
config['ssl-cert']
)
)
serviceMaker = SpecterServiceMaker()
| Move config parsing to plugin, use for SSL keys | Move config parsing to plugin, use for SSL keys
| Python | mit | praekelt/specter,praekelt/specter | from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
["key", None, "specter.key", "SSL key file"],
["cert", None, "specter.crt", "SSL certificate file"]
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(options['config'])),
ssl.DefaultOpenSSLContextFactory(
options['key'],
options['cert']
)
)
serviceMaker = SpecterServiceMaker()
Move config parsing to plugin, use for SSL keys | import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
from zope.interface import implements
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(config)),
ssl.DefaultOpenSSLContextFactory(
config['ssl-key'],
config['ssl-cert']
)
)
serviceMaker = SpecterServiceMaker()
| <commit_before>from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
["key", None, "specter.key", "SSL key file"],
["cert", None, "specter.crt", "SSL certificate file"]
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(options['config'])),
ssl.DefaultOpenSSLContextFactory(
options['key'],
options['cert']
)
)
serviceMaker = SpecterServiceMaker()
<commit_msg>Move config parsing to plugin, use for SSL keys<commit_after> | import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
from zope.interface import implements
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(config)),
ssl.DefaultOpenSSLContextFactory(
config['ssl-key'],
config['ssl-cert']
)
)
serviceMaker = SpecterServiceMaker()
| from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
["key", None, "specter.key", "SSL key file"],
["cert", None, "specter.crt", "SSL certificate file"]
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(options['config'])),
ssl.DefaultOpenSSLContextFactory(
options['key'],
options['cert']
)
)
serviceMaker = SpecterServiceMaker()
Move config parsing to plugin, use for SSL keysimport yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
from zope.interface import implements
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(config)),
ssl.DefaultOpenSSLContextFactory(
config['ssl-key'],
config['ssl-cert']
)
)
serviceMaker = SpecterServiceMaker()
| <commit_before>from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
["key", None, "specter.key", "SSL key file"],
["cert", None, "specter.crt", "SSL certificate file"]
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(options['config'])),
ssl.DefaultOpenSSLContextFactory(
options['key'],
options['cert']
)
)
serviceMaker = SpecterServiceMaker()
<commit_msg>Move config parsing to plugin, use for SSL keys<commit_after>import yaml
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from twisted.web import server
from twisted.internet import ssl
from zope.interface import implements
import specter
class Options(usage.Options):
optParameters = [
["port", "p", 2400, "The port to listen on"],
["config", "c", "specter.yml", "Config file"],
]
class SpecterServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = "specter"
description = "Distributex - A simple mutex lock service"
options = Options
def makeService(self, options):
config = yaml.load(open(options['config']))
return internet.SSLServer(
int(options['port']),
server.Site(specter.SiteRoot(config)),
ssl.DefaultOpenSSLContextFactory(
config['ssl-key'],
config['ssl-cert']
)
)
serviceMaker = SpecterServiceMaker()
|
139346c72a09719eba3d444c67d1b54c1b68eae6 | uni_form/templatetags/uni_form_field.py | uni_form/templatetags/uni_form_field.py | from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
| from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload",
"passwordinput":"passwordinput textInput"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
| Add "passwordinput" to class_converter, because it needs the textInput class too. | Add "passwordinput" to class_converter, because it needs the textInput class too.
| Python | mit | treyhunner/django-crispy-forms,uranusjr/django-crispy-forms-ng,alanwj/django-crispy-forms,iris-edu-int/django-crispy-forms,scuml/django-crispy-forms,RamezIssac/django-crispy-forms,PetrDlouhy/django-crispy-forms,zixan/django-crispy-forms,smirolo/django-crispy-forms,schrd/django-crispy-forms,CashStar/django-uni-form,pydanny/django-uni-form,ngenovictor/django-crispy-forms,eykanal/django-crispy-forms,eykanal/django-crispy-forms,treyhunner/django-crispy-forms,spectras/django-crispy-forms,IanLee1521/django-crispy-forms,django-crispy-forms/django-crispy-forms,carltongibson/django-crispy-forms,agepoly/django-crispy-forms,iris-edu-int/django-crispy-forms,PetrDlouhy/django-crispy-forms,avsd/django-crispy-forms,maraujop/django-crispy-forms,spectras/django-crispy-forms,saydulk/django-crispy-forms,jtyoung/django-crispy-forms,impulse-cloud/django-crispy-forms,iedparis8/django-crispy-forms,bouttier/django-crispy-forms,ngenovictor/django-crispy-forms,jtyoung/django-crispy-forms,jcomeauictx/django-crispy-forms,avsd/django-crispy-forms,iris-edu/django-crispy-forms,carltongibson/django-crispy-forms,zixan/django-crispy-forms,dessibelle/django-crispy-forms,pydanny/django-uni-form,davidszotten/django-crispy-forms,CashStar/django-uni-form,alanwj/django-crispy-forms,maraujop/django-crispy-forms,ionelmc/django-uni-form,rfleschenberg/django-crispy-forms,IanLee1521/django-crispy-forms,davidszotten/django-crispy-forms,rfleschenberg/django-crispy-forms,agepoly/django-crispy-forms,scuml/django-crispy-forms,VishvajitP/django-crispy-forms,iedparis8/django-crispy-forms,HungryCloud/django-crispy-forms,damienjones/django-crispy-forms,bouttier/django-crispy-forms,dzhuang/django-crispy-forms,RamezIssac/django-crispy-forms,schrd/django-crispy-forms,iris-edu/django-crispy-forms,VishvajitP/django-crispy-forms,tarunlnmiit/django-crispy-forms,jcomeauictx/django-crispy-forms,HungryCloud/django-crispy-forms,tarunlnmiit/django-crispy-forms,damienjones/django-crispy-forms,django-crispy-forms/dj
ango-crispy-forms,impulse-cloud/django-crispy-forms,pjdelport/django-crispy-forms,dzhuang/django-crispy-forms,Stranger6667/django-crispy-forms,Stranger6667/django-crispy-forms,uranusjr/django-crispy-forms-ng,dessibelle/django-crispy-forms,HungryCloud/django-crispy-forms,smirolo/django-crispy-forms,saydulk/django-crispy-forms | from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
Add "passwordinput" to class_converter, because it needs the textInput class too. | from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload",
"passwordinput":"passwordinput textInput"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
| <commit_before>from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
<commit_msg>Add "passwordinput" to class_converter, because it needs the textInput class too.<commit_after> | from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload",
"passwordinput":"passwordinput textInput"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
| from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
Add "passwordinput" to class_converter, because it needs the textInput class too.from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload",
"passwordinput":"passwordinput textInput"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
| <commit_before>from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
<commit_msg>Add "passwordinput" to class_converter, because it needs the textInput class too.<commit_after>from django import template
register = template.Library()
class_converter = {
"textinput":"textinput textInput",
"fileinput":"fileinput fileUpload",
"passwordinput":"passwordinput textInput"
}
@register.filter
def is_checkbox(field):
return field.field.widget.__class__.__name__.lower() == "checkboxinput"
@register.filter
def with_class(field):
class_name = field.field.widget.__class__.__name__.lower()
class_name = class_converter.get(class_name, class_name)
if "class" in field.field.widget.attrs:
css_class = field.field.widget.attrs['class']
if field.field.widget.attrs['class'].find(class_name) == -1:
css_class += " %s" % (class_name,)
else:
css_class = class_name
return field.as_widget(attrs={'class': css_class})
|
c55b0bf0e8249a7ca0b66a9338b777bac574ee61 | openbudget/apps/projects/urls/api.py | openbudget/apps/projects/urls/api.py | from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)$',
api.StateDetail.as_view(),
name='state-detail'
),
)
| from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)/$',
api.StateDetail.as_view(),
name='state-detail'
),
)
| Put back the trailing / and fix it in the JS model | Put back the trailing / and fix it in the JS model
| Python | bsd-3-clause | pwalsh/openbudgets,moshe742/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,openbudgets/openbudgets,shaib/openbudgets,shaib/openbudgets,moshe742/openbudgets | from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)$',
api.StateDetail.as_view(),
name='state-detail'
),
)
Put back the trailing / and fix it in the JS model | from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)/$',
api.StateDetail.as_view(),
name='state-detail'
),
)
| <commit_before>from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)$',
api.StateDetail.as_view(),
name='state-detail'
),
)
<commit_msg>Put back the trailing / and fix it in the JS model<commit_after> | from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)/$',
api.StateDetail.as_view(),
name='state-detail'
),
)
| from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)$',
api.StateDetail.as_view(),
name='state-detail'
),
)
Put back the trailing / and fix it in the JS modelfrom django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)/$',
api.StateDetail.as_view(),
name='state-detail'
),
)
| <commit_before>from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)$',
api.StateDetail.as_view(),
name='state-detail'
),
)
<commit_msg>Put back the trailing / and fix it in the JS model<commit_after>from django.conf.urls import patterns, url
from openbudget.apps.projects.views import api
urlpatterns = patterns('',
url(r'^$',
api.ProjectList.as_view(),
name='project-list'
),
url(r'^states/$',
api.StateList.as_view(),
name='state-list'
),
url(
r'^(?P<uuid>\w+)/$',
api.ProjectDetail.as_view(),
name='project-detail'
),
url(
r'^states/(?P<uuid>\w+)/$',
api.StateDetail.as_view(),
name='state-detail'
),
)
|
32fa0f4937aa9fc5545aed8d839618579a9b0be4 | markdown_i18n/extension.py | markdown_i18n/extension.py | from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', 200)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
#self.md.treeprocessors.link('i18n', '<toc')
self.toc_found = True
| from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
self.priority = 1
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', self.priority)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
self.md.treeprocessors.add(
'i18n', I18NTreeProcessor(self.md, self), '<toc')
self.toc_found = True
| Fix link previous to TOC and Fix priority | Fix link previous to TOC and Fix priority
| Python | mit | gisce/markdown-i18n | from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', 200)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
#self.md.treeprocessors.link('i18n', '<toc')
self.toc_found = True
Fix link previous to TOC and Fix priority | from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
self.priority = 1
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', self.priority)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
self.md.treeprocessors.add(
'i18n', I18NTreeProcessor(self.md, self), '<toc')
self.toc_found = True
| <commit_before>from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', 200)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
#self.md.treeprocessors.link('i18n', '<toc')
self.toc_found = True
<commit_msg>Fix link previous to TOC and Fix priority<commit_after> | from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
self.priority = 1
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', self.priority)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
self.md.treeprocessors.add(
'i18n', I18NTreeProcessor(self.md, self), '<toc')
self.toc_found = True
| from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', 200)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
#self.md.treeprocessors.link('i18n', '<toc')
self.toc_found = True
Fix link previous to TOC and Fix priorityfrom markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
self.priority = 1
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', self.priority)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
self.md.treeprocessors.add(
'i18n', I18NTreeProcessor(self.md, self), '<toc')
self.toc_found = True
| <commit_before>from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', 200)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
#self.md.treeprocessors.link('i18n', '<toc')
self.toc_found = True
<commit_msg>Fix link previous to TOC and Fix priority<commit_after>from markdown.extensions import Extension
from markdown_i18n.parser import I18NTreeProcessor
class I18NExtension(Extension):
def __init__(self, **kwargs):
self.config = {
'i18n_lang': ['en_US', 'Locale'],
'i18n_dir': ['', 'Path to get the translations and']
}
self.toc_found = False
self.md = None
self.priority = 1
super(I18NExtension, self).__init__(**kwargs)
def extendMarkdown(self, md):
md.registerExtension(self)
md.treeprocessors.register(
I18NTreeProcessor(md, self), 'i18n', self.priority)
self.md = md
def reset(self):
if not self.toc_found and 'toc' in self.md.treeprocessors:
self.md.treeprocessors.add(
'i18n', I18NTreeProcessor(self.md, self), '<toc')
self.toc_found = True
|
db334f19f66a4d842f206696a40ac2d351c774ac | Testing/test_Misc.py | Testing/test_Misc.py | import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
| import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
| Add test for bug involving function definitions that Jordan found. | Add test for bug involving function definitions that Jordan found.
| Python | bsd-3-clause | GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell | import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
Add test for bug involving function definitions that Jordan found. | import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for bug involving function definitions that Jordan found.<commit_after> | import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
| import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
Add test for bug involving function definitions that Jordan found.import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add test for bug involving function definitions that Jordan found.<commit_after>import unittest
import os
import scipy
from SloppyCell.ReactionNetworks import *
from AlgTestNets import algebraic_net_assignment
base_net = algebraic_net_assignment.copy()
class test_Misc(unittest.TestCase):
def test_AssignedVarBug(self):
""" Test handling of assigned variables initialized to concentration
'None'"""
# This used to raise an exception.
net = base_net.copy('test')
net.add_species('tester', 'cell', None)
net.add_assignment_rule('tester', 'X0')
net.updateAssignedVars(1.0)
def test_ChangingFunctionDefs(self):
"""
Test whether changing function definitions are handled correctly.
"""
net = Network('test')
net.add_parameter('x', 0.0)
net.add_rate_rule('x', 'f(1)')
net.add_func_def('f', ['x'], 'x+2')
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 30)
# It's not clear to me why this version wasn't causing failures
# before...
#net.remove_component('f')
#net.add_func_def('f', ['x'], 'x+4')
net.functionDefinitions.get('f').math = 'x+4'
traj = Dynamics.integrate(net, [0, 10])
self.assertAlmostEqual(traj.get_var_val('x', 10), 50)
suite = unittest.makeSuite(test_Misc)
if __name__ == '__main__':
unittest.main()
|
f89c60e6ff2c846aacd39db8488de1300b156a71 | src/membership/web/views.py | src/membership/web/views.py | """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
m.update(self.request.form)
if (not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({'ok': True})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
| """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
if (not self.try_update_model(m) or
not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
| Use try_update_model in sign in handler instead of dict update. | Use try_update_model in sign in handler instead of dict update.
| Python | mit | akornatskyy/sample-blog-api,akornatskyy/sample-blog-api | """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
m.update(self.request.form)
if (not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({'ok': True})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
Use try_update_model in sign in handler instead of dict update. | """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
if (not self.try_update_model(m) or
not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
| <commit_before>"""
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
m.update(self.request.form)
if (not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({'ok': True})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
<commit_msg>Use try_update_model in sign in handler instead of dict update.<commit_after> | """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
if (not self.try_update_model(m) or
not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
| """
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
m.update(self.request.form)
if (not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({'ok': True})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
Use try_update_model in sign in handler instead of dict update."""
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
if (not self.try_update_model(m) or
not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
| <commit_before>"""
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
m.update(self.request.form)
if (not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({'ok': True})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
<commit_msg>Use try_update_model in sign in handler instead of dict update.<commit_after>"""
"""
from wheezy.core.collections import attrdict
from wheezy.core.comp import u
from wheezy.security import Principal
from wheezy.web.authorization import authorize
from shared.views import APIHandler
from membership.validation import credential_validator
class SignInHandler(APIHandler):
def post(self):
m = attrdict(username=u(''), password=u(''))
if (not self.try_update_model(m) or
not self.validate(m, credential_validator) or
not self.authenticate(m)):
return self.json_errors()
return self.json_response({'username': m.username})
def authenticate(self, credential):
with self.factory('ro') as f:
user = f.membership.authenticate(credential)
if not user:
return False
self.principal = Principal(id=user['id'])
return True
class SignUpHandler(APIHandler):
def post(self):
m = attrdict(email=u(''), username=u(''), password=u(''))
m.update(self.request.form)
if not self.signup(m):
return self.json_errors()
return self.json_response({})
def signup(self, m):
self.error('This feature is not available yet.')
return False
class SignOutHandler(APIHandler):
def get(self):
del self.principal
return self.json_response({})
class UserHandler(APIHandler):
@authorize
def get(self):
return self.json_response({'username': self.principal.id})
|
7288badce0b2ccf78cf4fbd041b5cf909343cc46 | profile_collection/startup/80-areadetector.py | profile_collection/startup/80-areadetector.py | from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
| from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
#shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
shctl1 = EpicsSignal('XF:28IDC-ES:1{Sh:Exp}Cmd-Cmd', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
# shutter=shctl1,
# shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
| Put shutter control entirely in the user's hands | [CFG] Put shutter control entirely in the user's hands
| Python | bsd-2-clause | NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd | from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
[CFG] Put shutter control entirely in the user's hands | from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
#shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
shctl1 = EpicsSignal('XF:28IDC-ES:1{Sh:Exp}Cmd-Cmd', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
# shutter=shctl1,
# shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
| <commit_before>from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
<commit_msg>[CFG] Put shutter control entirely in the user's hands<commit_after> | from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
#shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
shctl1 = EpicsSignal('XF:28IDC-ES:1{Sh:Exp}Cmd-Cmd', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
# shutter=shctl1,
# shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
| from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
[CFG] Put shutter control entirely in the user's handsfrom ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
#shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
shctl1 = EpicsSignal('XF:28IDC-ES:1{Sh:Exp}Cmd-Cmd', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
# shutter=shctl1,
# shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
| <commit_before>from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
shutter=shctl1,
shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
<commit_msg>[CFG] Put shutter control entirely in the user's hands<commit_after>from ophyd.controls.area_detector import (AreaDetectorFileStoreHDF5,
AreaDetectorFileStoreTIFF,
AreaDetectorFileStoreTIFFSquashing)
# from shutter import sh1
#shctl1 = EpicsSignal('XF:28IDC-ES:1{Det:PE1}cam1:ShutterMode', name='shctl1')
shctl1 = EpicsSignal('XF:28IDC-ES:1{Sh:Exp}Cmd-Cmd', name='shctl1')
pe1 = AreaDetectorFileStoreTIFFSquashing(
'XF:28IDC-ES:1{Det:PE1}',
name='pe1',
stats=[],
ioc_file_path = 'H:/pe1_data',
file_path = '/home/xf28id1/pe1_data',
# shutter=shctl1,
# shutter_val=(1, 0)
)
# Dan and Sanjit commented this out in June.
#shctl2 = EpicsSignal('XF:28IDC-ES:1{Det:PE2}cam1:ShutterMode', name='shctl2')
#pe2 = AreaDetectorFileStoreTIFFSquashing(
# 'XF:28IDC-ES:1{Det:PE2}',
# name='pe2',
# stats=[],
# ioc_file_path = 'G:/pe2_data',
# file_path = '/home/xf28id1/pe2_data',
# shutter=shctl2,
# shutter_val=(1,0))
|
9c7f842c0766603d02c9ee1b5fcc7f2e8f6b1a64 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print 'db'
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | Create documentation of DataSource Settings | 8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8 | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print 'db'
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print 'db'
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after> | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print 'db'
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print 'db'
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
for db in dbs:
db = db.split('(')
n = 0
for i in db:
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(i).replace('"',''))
n += 1
print dblist
for db in dblist:
print db
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' ) |
8086005811314734acde07f8cce481f48967edf5 | ibmcnx/doc/DataSources.py | ibmcnx/doc/DataSources.py | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | Create documentation of DataSource Settings | : Create documentation of DataSource Settings
Task-Url: | Python | apache-2.0 | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url: | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after> | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ): Create documentation of DataSource Settings
Task-Url: ######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) | <commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.splitlines()
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>: Create documentation of DataSource Settings
Task-Url: <commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
print AdminControl.getCell()
cell = "/Cell:" + AdminControl.getCell() + "/"
cellid = AdminConfig.getid( cell )
dbs = AdminConfig.list( 'DataSource', str(cellid) )
dbs = dbs.split('(')
print dbs
for db in dbs.splitlines():
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 ) |
62d65f6db0b4507fdc78ef3934e80c4b2760c85d | accounts/backends.py | accounts/backends.py | from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_or_create_user(self, user_id):
# FIXME: remove duplication with authenticate()
user = self.get_user(user_id)
if not user:
try:
passwd = nis.match(username, 'passwd').split(':')
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| Add a little function to make it easier to create users on-the-fly via NIS | Add a little function to make it easier to create users on-the-fly via NIS
| Python | mit | chazy/reviewboard,beol/reviewboard,1tush/reviewboard,1tush/reviewboard,KnowNo/reviewboard,asutherland/opc-reviewboard,KnowNo/reviewboard,reviewboard/reviewboard,custode/reviewboard,chazy/reviewboard,KnowNo/reviewboard,atagar/ReviewBoard,1tush/reviewboard,sgallagher/reviewboard,chazy/reviewboard,Khan/reviewboard,chazy/reviewboard,asutherland/opc-reviewboard,atagar/ReviewBoard,brennie/reviewboard,1tush/reviewboard,custode/reviewboard,Khan/reviewboard,chazy/reviewboard,chazy/reviewboard,bkochendorfer/reviewboard,1tush/reviewboard,asutherland/opc-reviewboard,atagar/ReviewBoard,atagar/ReviewBoard,atagar/ReviewBoard,reviewboard/reviewboard,chazy/reviewboard,beol/reviewboard,asutherland/opc-reviewboard,beol/reviewboard,reviewboard/reviewboard,Khan/reviewboard,reviewboard/reviewboard,Khan/reviewboard,bkochendorfer/reviewboard,custode/reviewboard,1tush/reviewboard,atagar/ReviewBoard,sgallagher/reviewboard,davidt/reviewboard,davidt/reviewboard,1tush/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,Khan/reviewboard,Khan/reviewboard,atagar/ReviewBoard,1tush/reviewboard,custode/reviewboard,chazy/reviewboard,1tush/reviewboard,brennie/reviewboard,chipx86/reviewboard,beol/reviewboard,chipx86/reviewboard,atagar/ReviewBoard,bkochendorfer/reviewboard,Khan/reviewboard,Khan/reviewboard,brennie/reviewboard,chazy/reviewboard,davidt/reviewboard,atagar/ReviewBoard,KnowNo/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,chipx86/reviewboard,Khan/reviewboard,brennie/reviewboard,sgallagher/reviewboard | from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add a little function to make it easier to create users on-the-fly via NIS | from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_or_create_user(self, user_id):
# FIXME: remove duplication with authenticate()
user = self.get_user(user_id)
if not user:
try:
passwd = nis.match(username, 'passwd').split(':')
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| <commit_before>from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add a little function to make it easier to create users on-the-fly via NIS<commit_after> | from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_or_create_user(self, user_id):
# FIXME: remove duplication with authenticate()
user = self.get_user(user_id)
if not user:
try:
passwd = nis.match(username, 'passwd').split(':')
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
Add a little function to make it easier to create users on-the-fly via NISfrom django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_or_create_user(self, user_id):
# FIXME: remove duplication with authenticate()
user = self.get_user(user_id)
if not user:
try:
passwd = nis.match(username, 'passwd').split(':')
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
| <commit_before>from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
<commit_msg>Add a little function to make it easier to create users on-the-fly via NIS<commit_after>from django.conf import settings
from django.contrib.auth.models import User, check_password
import crypt
import nis
class NISBackend:
"""
Authenticate against a user on an NIS server.
"""
def authenticate(self, username, password):
user = None
try:
passwd = nis.match(username, 'passwd').split(':')
original_crypted = passwd[1]
new_crypted = crypt.crypt(password, original_crypted[:2])
if original_crypted == new_crypted:
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# Create a new user.
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_or_create_user(self, user_id):
# FIXME: remove duplication with authenticate()
user = self.get_user(user_id)
if not user:
try:
passwd = nis.match(username, 'passwd').split(':')
first_name, last_name = passwd[4].split(' ', 2)
email = '%s@%s' % (username, settings.NIS_EMAIL_DOMAIN)
user = User(username=username,
password='',
first_name=first_name,
last_name=last_name,
email=email)
user.is_staff = False
user.is_superuser = False
user.save()
except nis.error:
pass
return user
def get_user(self, user_id):
try:
return User.objects.get(pk=user_id)
except User.DoesNotExist:
return None
|
43cd20e94c01e9364d8b0b2e50c701810d68b491 | adhocracy4/filters/views.py | adhocracy4/filters/views.py | from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter(self):
return self.filter_set(
self.request.GET,
request=self.request
)
def get_queryset(self):
qs = self.filter().qs
return qs
| from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter_kwargs(self):
default_kwargs = {
'data': self.request.GET,
'request': self.request,
'queryset': super().get_queryset()
}
return default_kwargs
def filter(self):
return self.filter_set(
**self.filter_kwargs()
)
def get_queryset(self):
qs = self.filter().qs
return qs
| Allow to override kwargs of filter | Allow to override kwargs of filter
| Python | agpl-3.0 | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter(self):
return self.filter_set(
self.request.GET,
request=self.request
)
def get_queryset(self):
qs = self.filter().qs
return qs
Allow to override kwargs of filter | from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter_kwargs(self):
default_kwargs = {
'data': self.request.GET,
'request': self.request,
'queryset': super().get_queryset()
}
return default_kwargs
def filter(self):
return self.filter_set(
**self.filter_kwargs()
)
def get_queryset(self):
qs = self.filter().qs
return qs
| <commit_before>from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter(self):
return self.filter_set(
self.request.GET,
request=self.request
)
def get_queryset(self):
qs = self.filter().qs
return qs
<commit_msg>Allow to override kwargs of filter<commit_after> | from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter_kwargs(self):
default_kwargs = {
'data': self.request.GET,
'request': self.request,
'queryset': super().get_queryset()
}
return default_kwargs
def filter(self):
return self.filter_set(
**self.filter_kwargs()
)
def get_queryset(self):
qs = self.filter().qs
return qs
| from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter(self):
return self.filter_set(
self.request.GET,
request=self.request
)
def get_queryset(self):
qs = self.filter().qs
return qs
Allow to override kwargs of filterfrom django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter_kwargs(self):
default_kwargs = {
'data': self.request.GET,
'request': self.request,
'queryset': super().get_queryset()
}
return default_kwargs
def filter(self):
return self.filter_set(
**self.filter_kwargs()
)
def get_queryset(self):
qs = self.filter().qs
return qs
| <commit_before>from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter(self):
return self.filter_set(
self.request.GET,
request=self.request
)
def get_queryset(self):
qs = self.filter().qs
return qs
<commit_msg>Allow to override kwargs of filter<commit_after>from django.views import generic
class FilteredListView(generic.ListView):
"""List view with support for filtering and sorting via django-filter.
Usage:
Set filter_set to your django_filters.FilterSet definition.
Use view.filter.form in the template to access the filter form.
Note:
Always call super().get_queryset() when customizing get_queryset() to
include the filter functionality.
"""
filter_set = None
def filter_kwargs(self):
default_kwargs = {
'data': self.request.GET,
'request': self.request,
'queryset': super().get_queryset()
}
return default_kwargs
def filter(self):
return self.filter_set(
**self.filter_kwargs()
)
def get_queryset(self):
qs = self.filter().qs
return qs
|
5f409fd075e1bf4d4d58cb280f25761965f6a446 | url_shortener/__main__.py | url_shortener/__main__.py | # -*- coding: utf-8 -*-
from url_shortener import app, custom_config_loaded, views
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
custom_config_loaded.send(app)
app.run()
| # -*- coding: utf-8 -*-
from url_shortener import app, views
from url_shortener.validation import configure_url_validator
from url_shortener.models import configure_random_factory
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
configure_url_validator(app)
configure_random_factory(app)
app.run()
| Refactor applying custom configuration after it is loaded | Refactor applying custom configuration after it is loaded
The call sending custom_config_loaded is being replaced with direct
calls to the functions configure_random_factory and configure_url_validator.
| Python | mit | piotr-rusin/url-shortener,piotr-rusin/url-shortener | # -*- coding: utf-8 -*-
from url_shortener import app, custom_config_loaded, views
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
custom_config_loaded.send(app)
app.run()
Refactor applying custom configuration after it is loaded
The call sending custom_config_loaded is being replaced with direct
calls to the functions configure_random_factory and configure_url_validator. | # -*- coding: utf-8 -*-
from url_shortener import app, views
from url_shortener.validation import configure_url_validator
from url_shortener.models import configure_random_factory
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
configure_url_validator(app)
configure_random_factory(app)
app.run()
| <commit_before># -*- coding: utf-8 -*-
from url_shortener import app, custom_config_loaded, views
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
custom_config_loaded.send(app)
app.run()
<commit_msg>Refactor applying custom configuration after it is loaded
The call sending custom_config_loaded is being replaced with direct
calls to the functions configure_random_factory and configure_url_validator.<commit_after> | # -*- coding: utf-8 -*-
from url_shortener import app, views
from url_shortener.validation import configure_url_validator
from url_shortener.models import configure_random_factory
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
configure_url_validator(app)
configure_random_factory(app)
app.run()
| # -*- coding: utf-8 -*-
from url_shortener import app, custom_config_loaded, views
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
custom_config_loaded.send(app)
app.run()
Refactor applying custom configuration after it is loaded
The call sending custom_config_loaded is being replaced with direct
calls to the functions configure_random_factory and configure_url_validator.# -*- coding: utf-8 -*-
from url_shortener import app, views
from url_shortener.validation import configure_url_validator
from url_shortener.models import configure_random_factory
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
configure_url_validator(app)
configure_random_factory(app)
app.run()
| <commit_before># -*- coding: utf-8 -*-
from url_shortener import app, custom_config_loaded, views
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
custom_config_loaded.send(app)
app.run()
<commit_msg>Refactor applying custom configuration after it is loaded
The call sending custom_config_loaded is being replaced with direct
calls to the functions configure_random_factory and configure_url_validator.<commit_after># -*- coding: utf-8 -*-
from url_shortener import app, views
from url_shortener.validation import configure_url_validator
from url_shortener.models import configure_random_factory
log_file = app.config['LOG_FILE']
if not app.debug and log_file is not None:
import logging
from logging.handlers import TimedRotatingFileHandler
file_handler = TimedRotatingFileHandler(log_file, when='d')
file_handler.setLevel(logging.WARNING)
app.logger.addHandler(file_handler)
app.config.from_envvar('URL_SHORTENER_CONFIGURATION')
configure_url_validator(app)
configure_random_factory(app)
app.run()
|
a56aec4868f41022f6131d81d896da2a05717c89 | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.1',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.2',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
'pyenchant',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| Use pyenchant and bump to v0.0.2 | Use pyenchant and bump to v0.0.2
| Python | mit | likaiguo/etl_utils,fi-ecavc/etl_utils,17zuoye/etl_utils,sdgdsffdsfff/etl_utils,likaiguo/etl_utils,Luiti/etl_utils,sdgdsffdsfff/etl_utils,17zuoye/etl_utils,fi-ecavc/etl_utils,likaiguo/etl_utils,Luiti/etl_utils | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.1',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
Use pyenchant and bump to v0.0.2 | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.2',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
'pyenchant',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| <commit_before># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.1',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
<commit_msg>Use pyenchant and bump to v0.0.2<commit_after> | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.2',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
'pyenchant',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.1',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
Use pyenchant and bump to v0.0.2# -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.2',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
'pyenchant',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| <commit_before># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.1',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
<commit_msg>Use pyenchant and bump to v0.0.2<commit_after># -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='etl_utils',
version='0.0.2',
url='http://github.com/mvj3/etl_utils/',
license='MIT',
author='David Chen',
author_email=''.join(reversed("moc.liamg@emojvm")),
description='etl utils',
long_description='etl utils',
packages=['etl_utils'],
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'fast_object_id >= 0.0.2',
'nltk',
'marisa_trie',
'werkzeug',
'lxml >= 3.3.5',
'pyenchant',
],
classifiers=[
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
2ce711aa3584f4417600ea02166c05bd9f80273f | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py']
)
| from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py', 'scripts/svtool_to_vcf.py'],
dependency_links = ["https://pypi.python.org/packages/source/p/pysam/pysam-0.7.7.tar.gz"]
)
| Add another script in install and add a dependency link for pysam | Add another script in install and add a dependency link for pysam
| Python | bsd-2-clause | chapmanb/metasv,bioinform/metasv,chapmanb/metasv,bioinform/metasv,msahraeian/metasv,msahraeian/metasv,bioinform/metasv,msahraeian/metasv,chapmanb/metasv | from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py']
)
Add another script in install and add a dependency link for pysam | from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py', 'scripts/svtool_to_vcf.py'],
dependency_links = ["https://pypi.python.org/packages/source/p/pysam/pysam-0.7.7.tar.gz"]
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py']
)
<commit_msg>Add another script in install and add a dependency link for pysam<commit_after> | from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py', 'scripts/svtool_to_vcf.py'],
dependency_links = ["https://pypi.python.org/packages/source/p/pysam/pysam-0.7.7.tar.gz"]
)
| from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py']
)
Add another script in install and add a dependency link for pysamfrom setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py', 'scripts/svtool_to_vcf.py'],
dependency_links = ["https://pypi.python.org/packages/source/p/pysam/pysam-0.7.7.tar.gz"]
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py']
)
<commit_msg>Add another script in install and add a dependency link for pysam<commit_after>from setuptools import setup, find_packages
setup(
name='MetaSV',
version='0.1-alpha',
description='MetaSV: An accurate and integrative structural-variant caller for next generation sequencing',
author='Bina Technologies',
author_email='rd@bina.com',
url='https://github.com/bioinform/metasv',
packages = find_packages(),
install_requires = ["cython", "pysam==0.7.7", "pybedtools", "pyvcf"],
package_data = {"metasv": ["resources/*"]},
scripts=['scripts/run_metasv.py', 'scripts/svtool_to_vcf.py'],
dependency_links = ["https://pypi.python.org/packages/source/p/pysam/pysam-0.7.7.tar.gz"]
)
|
4c1ff61540a6f97a80b150f337e892a728d9944b | feedhq/feeds/management/commands/favicons.py | feedhq/feeds/management/commands/favicons.py | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import Feed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = Feed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
| from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import UniqueFeed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = UniqueFeed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
| Update favicon based on UniqueFeed values, not Feed | Update favicon based on UniqueFeed values, not Feed
| Python | bsd-3-clause | feedhq/feedhq,feedhq/feedhq,rmoorman/feedhq,rmoorman/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,feedhq/feedhq,vincentbernat/feedhq,vincentbernat/feedhq,rmoorman/feedhq,rmoorman/feedhq,vincentbernat/feedhq,feedhq/feedhq,rmoorman/feedhq | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import Feed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = Feed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
Update favicon based on UniqueFeed values, not Feed | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import UniqueFeed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = UniqueFeed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
| <commit_before>from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import Feed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = Feed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
<commit_msg>Update favicon based on UniqueFeed values, not Feed<commit_after> | from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import UniqueFeed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = UniqueFeed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
| from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import Feed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = Feed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
Update favicon based on UniqueFeed values, not Feedfrom optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import UniqueFeed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = UniqueFeed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
| <commit_before>from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import Feed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = Feed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
<commit_msg>Update favicon based on UniqueFeed values, not Feed<commit_after>from optparse import make_option
from django.conf import settings
from django.core.management.base import BaseCommand
from django.db import connection
from raven import Client
from ...models import UniqueFeed, Favicon
class Command(BaseCommand):
"""Fetches favicon updates and saves them if there are any"""
option_list = BaseCommand.option_list + (
make_option(
'--all',
action='store_true',
dest='all',
default=False,
help='Force update of all existing favicons',
),
)
def handle(self, *args, **kwargs):
links = UniqueFeed.objects.values_list('link', flat=True).distinct()
for link in links:
try:
Favicon.objects.update_favicon(link,
force_update=kwargs['all'])
except Exception:
if settings.DEBUG or not hasattr(settings, 'SENTRY_DSN'):
raise
else:
client = Client(dsn=settings.SENTRY_DSN)
client.captureException()
connection.close()
|
5ed2886fa89d25da93ca12ceb9d1f9f25dec19fc | apps/challenge/management/commands/generate_challenge_feed_1st_complete.py | apps/challenge/management/commands/generate_challenge_feed_1st_complete.py | from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('-completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
| from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
| Fix order for 'first to complete' calculation | Fix order for 'first to complete' calculation
| Python | bsd-3-clause | mfitzp/smrtr,mfitzp/smrtr | from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('-completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
Fix order for 'first to complete' calculation | from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
| <commit_before>from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('-completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
<commit_msg>Fix order for 'first to complete' calculation<commit_after> | from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
| from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('-completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
Fix order for 'first to complete' calculationfrom optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
| <commit_before>from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('-completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
<commit_msg>Fix order for 'first to complete' calculation<commit_after>from optparse import make_option
import sys
from django.core.management.base import BaseCommand
from challenge.models import Challenge, UserChallenge
from resources.models import Resource
from django.conf import settings
from django.db.models import Avg, Max, Min, Count
class Command(BaseCommand):
args = ""
help = "Update challenge total_questions and total_resources (post db import)"
def handle(self, *args, **options):
from wallextend.models import add_extended_wallitem
challenges = Challenge.objects.filter(userchallenge__status=2).distinct()
for challenge in challenges:
userchallenge = challenge.userchallenge_set.order_by('completed')[0]
add_extended_wallitem(challenge.wall,userchallenge.user,created_at=userchallenge.completed,template_name='challenge_1stcomplete.html',extra_context={
'body':'is the first to complete!',
'challenge': challenge,
'userchallenge': userchallenge,
})
|
5d36b16fde863cccf404f658f53eac600ac9ddb1 | foomodules/link_harvester/common_handlers.py | foomodules/link_harvester/common_handlers.py | import re
import socket
import urllib
from bs4 import BeautifulSoup
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError):
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| Print warning when wurstball downloads fail | Print warning when wurstball downloads fail
| Python | mit | horazont/xmpp-crowd | import re
import socket
import urllib
from bs4 import BeautifulSoup
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError):
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
Print warning when wurstball downloads fail | import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| <commit_before>import re
import socket
import urllib
from bs4 import BeautifulSoup
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError):
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
<commit_msg>Print warning when wurstball downloads fail<commit_after> | import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| import re
import socket
import urllib
from bs4 import BeautifulSoup
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError):
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
Print warning when wurstball downloads failimport logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| <commit_before>import re
import socket
import urllib
from bs4 import BeautifulSoup
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError):
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
<commit_msg>Print warning when wurstball downloads fail<commit_after>import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if WURSTBALL_RE.match(metadata.url) is None:
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
|
5a8d7375b617bd5605bce5f09a4caedef170a85c | gbpservice/neutron/db/migration/cli.py | gbpservice/neutron/db/migration/cli.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF(project='neutron')
CONF.command.func(config, CONF.command.name)
| Set project when doing neutron DB migrations | Set project when doing neutron DB migrations
That way, the default configuration files/dirs from the neutron
projects are read when doing the DB migrations.
This is useful if eg. some configuration files are in
/etc/neutron/neutron.conf.d/ . Theses files will then be automatically
evaluated.
Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62
| Python | apache-2.0 | noironetworks/group-based-policy,stackforge/group-based-policy,stackforge/group-based-policy,noironetworks/group-based-policy | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
Set project when doing neutron DB migrations
That way, the default configuration files/dirs from the neutron
projects are read when doing the DB migrations.
This is useful if eg. some configuration files are in
/etc/neutron/neutron.conf.d/ . Theses files will then be automatically
evaluated.
Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF(project='neutron')
CONF.command.func(config, CONF.command.name)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
<commit_msg>Set project when doing neutron DB migrations
That way, the default configuration files/dirs from the neutron
projects are read when doing the DB migrations.
This is useful if eg. some configuration files are in
/etc/neutron/neutron.conf.d/ . Theses files will then be automatically
evaluated.
Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF(project='neutron')
CONF.command.func(config, CONF.command.name)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
Set project when doing neutron DB migrations
That way, the default configuration files/dirs from the neutron
projects are read when doing the DB migrations.
This is useful if eg. some configuration files are in
/etc/neutron/neutron.conf.d/ . Theses files will then be automatically
evaluated.
Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF(project='neutron')
CONF.command.func(config, CONF.command.name)
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF()
CONF.command.func(config, CONF.command.name)
<commit_msg>Set project when doing neutron DB migrations
That way, the default configuration files/dirs from the neutron
projects are read when doing the DB migrations.
This is useful if eg. some configuration files are in
/etc/neutron/neutron.conf.d/ . Theses files will then be automatically
evaluated.
Change-Id: I4997a86c4df5fa45f7682d653a5e66b1ae184a62<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.db.migration.cli import * # noqa
def main():
config = alembic_config.Config(
os.path.join(os.path.dirname(__file__), 'alembic.ini'))
config.set_main_option(
'script_location',
'gbpservice.neutron.db.migration:alembic_migrations')
config.neutron_config = CONF
CONF(project='neutron')
CONF.command.func(config, CONF.command.name)
|
a58e035027c732f519791fe587ddd509c7013344 | mail/tests/handlers/react_tests.py | mail/tests/handlers/react_tests.py | from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
| from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
def test_react_for_user_project():
"""
Then make sure that project react messages for existing user queued properly.
"""
dest_addr = "docs.index@test_user2.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing user queued properly")
| Add tests for sending messages to "user" projects | Add tests for sending messages to "user" projects
| Python | apache-2.0 | heiths/allura,leotrubach/sourceforge-allura,lym/allura-git,lym/allura-git,apache/allura,leotrubach/sourceforge-allura,apache/incubator-allura,apache/allura,apache/incubator-allura,Bitergia/allura,Bitergia/allura,heiths/allura,lym/allura-git,apache/allura,lym/allura-git,leotrubach/sourceforge-allura,apache/incubator-allura,apache/allura,Bitergia/allura,leotrubach/sourceforge-allura,heiths/allura,Bitergia/allura,Bitergia/allura,apache/allura,apache/incubator-allura,heiths/allura,lym/allura-git,heiths/allura | from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
Add tests for sending messages to "user" projects | from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
def test_react_for_user_project():
"""
Then make sure that project react messages for existing user queued properly.
"""
dest_addr = "docs.index@test_user2.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing user queued properly")
| <commit_before>from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
<commit_msg>Add tests for sending messages to "user" projects<commit_after> | from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
def test_react_for_user_project():
"""
Then make sure that project react messages for existing user queued properly.
"""
dest_addr = "docs.index@test_user2.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing user queued properly")
| from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
Add tests for sending messages to "user" projectsfrom nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
def test_react_for_user_project():
"""
Then make sure that project react messages for existing user queued properly.
"""
dest_addr = "docs.index@test_user2.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing user queued properly")
| <commit_before>from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
<commit_msg>Add tests for sending messages to "user" projects<commit_after>from nose.tools import *
from lamson.testing import *
import os
from lamson import server
relay = relay(port=8823)
client = RouterConversation("queuetester@localhost", "requests_tests")
confirm_format = "testing-confirm-[0-9]+@"
noreply_format = "testing-noreply@"
host = "localhost"
def test_react_for_existing_project():
"""
Then make sure that project react messages for existing project queued properly.
"""
dest_addr = "docs.index@test.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing project queued properly")
def test_react_for_bad_project():
"""
Then make sure that project react messages for non-existing project dropped properly.
"""
dest_addr = "docs.index@badproject.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for non-existing project dropped properly")
def test_react_for_user_project():
"""
Then make sure that project react messages for existing user queued properly.
"""
dest_addr = "docs.index@test_user2.%s" % host
client.begin()
client.say(dest_addr, "Test project react messages for existing user queued properly")
|
1ff4b0473c79150d5387ed2174b120128d465737 | app.py | app.py | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
if __name__ == "__main__":
app.run();
| from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
@app.route("/user/<username>")
def show_user(username):
return "User page for user " + username
@app.route("/game/<gamename>")
def show_game(gamename):
return "Game page for game " + gamename
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
return "Submit a score for game " + gamename
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
return "Show the leaderboard for game " + gamename
if __name__ == "__main__":
app.run();
| Add stub methods for expected paths | Add stub methods for expected paths
| Python | mit | JamesLaverack/scoreboard,JamesLaverack/scoreboard,JamesLaverack/scoreboard | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
if __name__ == "__main__":
app.run();
Add stub methods for expected paths | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
@app.route("/user/<username>")
def show_user(username):
return "User page for user " + username
@app.route("/game/<gamename>")
def show_game(gamename):
return "Game page for game " + gamename
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
return "Submit a score for game " + gamename
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
return "Show the leaderboard for game " + gamename
if __name__ == "__main__":
app.run();
| <commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
if __name__ == "__main__":
app.run();
<commit_msg>Add stub methods for expected paths<commit_after> | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
@app.route("/user/<username>")
def show_user(username):
return "User page for user " + username
@app.route("/game/<gamename>")
def show_game(gamename):
return "Game page for game " + gamename
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
return "Submit a score for game " + gamename
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
return "Show the leaderboard for game " + gamename
if __name__ == "__main__":
app.run();
| from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
if __name__ == "__main__":
app.run();
Add stub methods for expected pathsfrom flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
@app.route("/user/<username>")
def show_user(username):
return "User page for user " + username
@app.route("/game/<gamename>")
def show_game(gamename):
return "Game page for game " + gamename
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
return "Submit a score for game " + gamename
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
return "Show the leaderboard for game " + gamename
if __name__ == "__main__":
app.run();
| <commit_before>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
if __name__ == "__main__":
app.run();
<commit_msg>Add stub methods for expected paths<commit_after>from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello, world!"
@app.route("/user/<username>")
def show_user(username):
return "User page for user " + username
@app.route("/game/<gamename>")
def show_game(gamename):
return "Game page for game " + gamename
@app.route("/game/<gamename>/submit")
def show_submit_score(gamename):
return "Submit a score for game " + gamename
@app.route("/game/<gamename>/leaderboard")
def show_leaderboard(gamename):
return "Show the leaderboard for game " + gamename
if __name__ == "__main__":
app.run();
|
6cd1c7a95ca4162643b6d52f4bb82596178fde22 | gaphor/UML/__init__.py | gaphor/UML/__init__.py | # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
import gaphor.UML.uml2overrides # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
| # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
import gaphor.UML.uml2overrides # noqa: isort:skip
| Reorder imports in UML module | Reorder imports in UML module
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
import gaphor.UML.uml2overrides # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
Reorder imports in UML module | # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
import gaphor.UML.uml2overrides # noqa: isort:skip
| <commit_before># Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
import gaphor.UML.uml2overrides # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
<commit_msg>Reorder imports in UML module<commit_after> | # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
import gaphor.UML.uml2overrides # noqa: isort:skip
| # Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
import gaphor.UML.uml2overrides # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
Reorder imports in UML module# Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
import gaphor.UML.uml2overrides # noqa: isort:skip
| <commit_before># Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
import gaphor.UML.uml2overrides # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
<commit_msg>Reorder imports in UML module<commit_after># Here, order matters
from gaphor.UML.uml2 import * # noqa: isort:skip
from gaphor.UML.presentation import Presentation # noqa: isort:skip
from gaphor.UML.elementfactory import ElementFactory # noqa: isort:skip
from gaphor.UML import modelfactory as model # noqa: isort:skip
from gaphor.UML.umlfmt import format
from gaphor.UML.umllex import parse
import gaphor.UML.uml2overrides # noqa: isort:skip
|
d89ce876e4cba010b060599d1f23d23f63552ad9 | globaleaks/__init__.py | globaleaks/__init__.py | # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl"
"pt_BR", "ru", "tr", "vi" ]
| # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl",
"pt_BR", "ru", "tr", "vi" ]
| Fix typo inside of LANGUAGES_SUPPORTED. | Fix typo inside of LANGUAGES_SUPPORTED.
| Python | agpl-3.0 | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks | # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl"
"pt_BR", "ru", "tr", "vi" ]
Fix typo inside of LANGUAGES_SUPPORTED. | # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl",
"pt_BR", "ru", "tr", "vi" ]
| <commit_before># -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl"
"pt_BR", "ru", "tr", "vi" ]
<commit_msg>Fix typo inside of LANGUAGES_SUPPORTED.<commit_after> | # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl",
"pt_BR", "ru", "tr", "vi" ]
| # -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl"
"pt_BR", "ru", "tr", "vi" ]
Fix typo inside of LANGUAGES_SUPPORTED.# -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl",
"pt_BR", "ru", "tr", "vi" ]
| <commit_before># -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl"
"pt_BR", "ru", "tr", "vi" ]
<commit_msg>Fix typo inside of LANGUAGES_SUPPORTED.<commit_after># -*- encoding: utf-8 -*-
#
# In here we shall keep track of all variables and objects that should be
# instantiated only once and be common to pieces of GLBackend code.
__version__ = '2.24.16'
DATABASE_VERSION = 6
# Add here by hand the languages supported!
# copy paste format from 'grunt makeTranslations'
LANGUAGES_SUPPORTED = [
{ "code": "en", "name": "English"},
{ "code": "fr", "name": "French"},
{ "code": "hu_HU", "name": "Hungarian (Hungary)"},
{ "code": "it", "name": "Italian"},
{ "code": "nl", "name": "Dutch"},
{ "code": "pt_BR", "name": "Portuguese (Brazil)"},
{ "code": "ru", "name": "Russian" },
{ "code": "tr", "name": "Turkish"},
{ "code": "vi", "name": "Vietnamese"},
]
LANGUAGES_SUPPORTED_CODES = [ "en", "fr", "hu_HU", "it", "nl",
"pt_BR", "ru", "tr", "vi" ]
|
65c3f9fa4e31bc2c1c448846faba4af58bfd5e61 | src/download.py | src/download.py | import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
| import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
| Make dataset directory if it does not exist. | [fix] Make dataset directory if it does not exist.
| Python | mit | dsanno/chainer-cifar | import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
[fix] Make dataset directory if it does not exist. | import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
| <commit_before>import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
<commit_msg>[fix] Make dataset directory if it does not exist.<commit_after> | import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
| import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
[fix] Make dataset directory if it does not exist.import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
| <commit_before>import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
<commit_msg>[fix] Make dataset directory if it does not exist.<commit_after>import tarfile
import os
from six.moves.urllib import request
url_dir = 'https://www.cs.toronto.edu/~kriz/'
file_name = 'cifar-10-python.tar.gz'
save_dir = 'dataset'
tar_path = os.path.join(save_dir, file_name)
if __name__ == '__main__':
if not os.path.exists(save_dir):
os.makedirs(save_dir)
if os.path.exists(tar_path):
print('{:s} already downloaded.'.format(file_name))
else:
print('Downloading {:s}...'.format(file_name))
request.urlretrieve('{:s}{:s}'.format(url_dir, file_name), tar_path)
print('Extracting files...')
with tarfile.open(tar_path, 'r:gz') as f:
f.extractall(save_dir)
|
8836c5a5274c2a3573d2e706b67a1288de6e59bd | utils/repl.py | utils/repl.py | from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
| from nex.state import GlobalState
from nex.reader import Reader
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_dvi_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex')
state.execute_commands(command_grabber, banisher, reader)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
| Insert plain.tex into REPL state | Insert plain.tex into REPL state
| Python | mit | eddiejessup/nex | from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
Insert plain.tex into REPL state | from nex.state import GlobalState
from nex.reader import Reader
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_dvi_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex')
state.execute_commands(command_grabber, banisher, reader)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
| <commit_before>from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
<commit_msg>Insert plain.tex into REPL state<commit_after> | from nex.state import GlobalState
from nex.reader import Reader
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_dvi_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex')
state.execute_commands(command_grabber, banisher, reader)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
| from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
Insert plain.tex into REPL statefrom nex.state import GlobalState
from nex.reader import Reader
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_dvi_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex')
state.execute_commands(command_grabber, banisher, reader)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
| <commit_before>from nex.state import GlobalState
from nex.reader import Reader, EndOfFile
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/example/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_file(state, 'done.dvi')
<commit_msg>Insert plain.tex into REPL state<commit_after>from nex.state import GlobalState
from nex.reader import Reader
from nex.lexer import Lexer
from nex.instructioner import Instructioner
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from nex.parsing.utils import ChunkGrabber
from nex.box_writer import write_to_dvi_file
from nex.utils import TidyEnd
reader = Reader()
state = GlobalState.from_defaults(font_search_paths=['/Users/ejm/projects/nex/fonts'])
font_id = state.define_new_font(file_name='cmr10', at_clause=None)
state.select_font(is_global=True, font_id=font_id)
lexer = Lexer(reader, get_cat_code_func=state.codes.get_cat_code)
instructioner = Instructioner(lexer)
banisher = Banisher(instructioner, state, reader)
command_grabber = ChunkGrabber(banisher, command_parser)
reader.insert_file('/Users/ejm/projects/nex/tex/plain.tex')
state.execute_commands(command_grabber, banisher, reader)
while True:
s = input('In: ')
reader.insert_string(s + '\n')
try:
state.execute_commands(command_grabber, banisher, reader)
except TidyEnd:
break
# out_path = sys.stdout.buffer
write_to_dvi_file(state, 'repl.dvi', write_pdf=True)
|
4324257e5fe1c49281e4844b07d222b68bd45287 | avalon/fusion/lib.py | avalon/fusion/lib.py | import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
| import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
| Fix doctest - not sure why it was failing on the quotation marks | Fix doctest - not sure why it was failing on the quotation marks
| Python | mit | getavalon/core,mindbender-studio/core,mindbender-studio/core,getavalon/core | import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
Fix doctest - not sure why it was failing on the quotation marks | import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
| <commit_before>import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
<commit_msg>Fix doctest - not sure why it was failing on the quotation marks<commit_after> | import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
| import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
Fix doctest - not sure why it was failing on the quotation marksimport re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
| <commit_before>import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
("C:/test", 4, ".exr")
>>> get_frame_path("filename.00.tif")
("filename.", 2, ".tif")
>>> get_frame_path("foobar35.tif")
("foobar", 2, ".tif")
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
<commit_msg>Fix doctest - not sure why it was failing on the quotation marks<commit_after>import re
import os
import contextlib
from . import pipeline
@contextlib.contextmanager
def maintained_selection():
comp = pipeline.get_current_comp()
previous_selection = comp.GetToolList(True).values()
try:
yield
finally:
flow = comp.CurrentFrame.FlowView
flow.Select() # No args equals clearing selection
if previous_selection:
for tool in previous_selection:
flow.Select(tool, True)
def get_frame_path(path):
"""Get filename for the Fusion Saver with padded number as '#'
>>> get_frame_path("C:/test.exr")
('C:/test', 4, '.exr')
>>> get_frame_path("filename.00.tif")
('filename.', 2, '.tif')
>>> get_frame_path("foobar35.tif")
('foobar', 2, '.tif')
Args:
path (str): The path to render to.
Returns:
tuple: head, padding, tail (extension)
"""
filename, ext = os.path.splitext(path)
# Find a final number group
match = re.match('.*?([0-9]+)$', filename)
if match:
padding = len(match.group(1))
# remove number from end since fusion
# will swap it with the frame number
filename = filename[:-padding]
else:
padding = 4 # default Fusion padding
return filename, padding, ext
|
7b08777d77d6cfd5a4eeeee81fb51f5fdedde987 | bumblebee/modules/caffeine.py | bumblebee/modules/caffeine.py | # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| Add support for switching dpms | Add support for switching dpms
| Python | mit | tobi-wan-kenobi/bumblebee-status,tobi-wan-kenobi/bumblebee-status | # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Add support for switching dpms | # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| <commit_before># pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>Add support for switching dpms<commit_after> | # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| # pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
Add support for switching dpms# pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| <commit_before># pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
<commit_msg>Add support for switching dpms<commit_after># pylint: disable=C0111,R0903
"""Enable/disable automatic screen locking.
Requires the following executables:
* xset
* notify-send
"""
import bumblebee.input
import bumblebee.output
import bumblebee.engine
class Module(bumblebee.engine.Module):
def __init__(self, engine, config):
super(Module, self).__init__(engine, config,
bumblebee.output.Widget(full_text=self.caffeine)
)
engine.input.register_callback(self, button=bumblebee.input.LEFT_MOUSE,
cmd=self._toggle
)
def caffeine(self, widget):
return ""
def state(self, widget):
if self._active():
return "activated"
return "deactivated"
def _active(self):
for line in bumblebee.util.execute("xset q").split("\n"):
if "timeout" in line:
timeout = int(line.split(" ")[4])
if timeout == 0:
return True
return False
return False
def _toggle(self, widget):
if self._active():
bumblebee.util.execute("xset +dpms")
bumblebee.util.execute("xset s default")
bumblebee.util.execute("notify-send \"Out of coffee\"")
else:
bumblebee.util.execute("xset -dpms")
bumblebee.util.execute("xset s off")
bumblebee.util.execute("notify-send \"Consuming caffeine\"")
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
fd46372e7c8b6d0a181b9be0af1d812351874ec4 | furious/extras/appengine/ndb_persistence.py | furious/extras/appengine/ndb_persistence.py |
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
| #
# Copyright 2014 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module contains the default functions to use when performing
persistence operations backed by the App Engine ndb library.
"""
from google.appengine.ext import ndb
class FuriousContextNotFoundError(Exception):
"""FuriousContext entity not found in the datastore."""
class FuriousContext(ndb.Model):
context = ndb.JsonProperty(indexed=False, compressed=True)
@classmethod
def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict())
@classmethod
def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context)
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
| Add simple ndb_persitence Context storage model | Add simple ndb_persitence Context storage model
| Python | apache-2.0 | rosshendrickson-wf/furious,Workiva/furious,rosshendrickson-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious,andreleblanc-wf/furious,beaulyddon-wf/furious,mattsanders-wf/furious,andreleblanc-wf/furious,Workiva/furious |
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
Add simple ndb_persitence Context storage model | #
# Copyright 2014 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module contains the default functions to use when performing
persistence operations backed by the App Engine ndb library.
"""
from google.appengine.ext import ndb
class FuriousContextNotFoundError(Exception):
"""FuriousContext entity not found in the datastore."""
class FuriousContext(ndb.Model):
context = ndb.JsonProperty(indexed=False, compressed=True)
@classmethod
def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict())
@classmethod
def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context)
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
| <commit_before>
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
<commit_msg>Add simple ndb_persitence Context storage model<commit_after> | #
# Copyright 2014 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module contains the default functions to use when performing
persistence operations backed by the App Engine ndb library.
"""
from google.appengine.ext import ndb
class FuriousContextNotFoundError(Exception):
"""FuriousContext entity not found in the datastore."""
class FuriousContext(ndb.Model):
context = ndb.JsonProperty(indexed=False, compressed=True)
@classmethod
def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict())
@classmethod
def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context)
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
|
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
Add simple ndb_persitence Context storage model#
# Copyright 2014 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module contains the default functions to use when performing
persistence operations backed by the App Engine ndb library.
"""
from google.appengine.ext import ndb
class FuriousContextNotFoundError(Exception):
"""FuriousContext entity not found in the datastore."""
class FuriousContext(ndb.Model):
context = ndb.JsonProperty(indexed=False, compressed=True)
@classmethod
def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict())
@classmethod
def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context)
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
| <commit_before>
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
<commit_msg>Add simple ndb_persitence Context storage model<commit_after>#
# Copyright 2014 WebFilings, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""This module contains the default functions to use when performing
persistence operations backed by the App Engine ndb library.
"""
from google.appengine.ext import ndb
class FuriousContextNotFoundError(Exception):
"""FuriousContext entity not found in the datastore."""
class FuriousContext(ndb.Model):
context = ndb.JsonProperty(indexed=False, compressed=True)
@classmethod
def from_context(cls, context):
"""Create a `cls` entity from a context."""
return cls(id=context.id, context=context.to_dict())
@classmethod
def from_id(cls, id):
"""Load a `cls` entity and instantiate the Context it stores."""
from furious.context import Context
# TODO: Handle exceptions and retries here.
entity = cls.get_by_id(id)
if not entity:
raise FuriousContextNotFoundError(
"Context entity not found for: {}".format(id))
return Context.from_dict(entity.context)
def store_async_result(async):
"""Persist the Async's result to the datastore."""
pass
def store_async_marker(async):
"""Persist a marker indicating the Async ran to the datastore."""
pass
|
03d62abc0f48e49e1bfd672ab6c7c60cd8f6fef5 | users/models.py | users/models.py | from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
return "User(<{}>}".format(self.email)
| from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
if self.email:
return "User(<{}>)".format(self.email)
return "User(<{}>)".format(self.username)
| Fix return string from user model | Fix return string from user model
Also return username if there is no email address set
| Python | mit | Nikola-K/django-template,Nikola-K/django-template | from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
return "User(<{}>}".format(self.email)
Fix return string from user model
Also return username if there is no email address set | from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
if self.email:
return "User(<{}>)".format(self.email)
return "User(<{}>)".format(self.username)
| <commit_before>from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
return "User(<{}>}".format(self.email)
<commit_msg>Fix return string from user model
Also return username if there is no email address set<commit_after> | from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
if self.email:
return "User(<{}>)".format(self.email)
return "User(<{}>)".format(self.username)
| from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
return "User(<{}>}".format(self.email)
Fix return string from user model
Also return username if there is no email address setfrom django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
if self.email:
return "User(<{}>)".format(self.email)
return "User(<{}>)".format(self.username)
| <commit_before>from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
return "User(<{}>}".format(self.email)
<commit_msg>Fix return string from user model
Also return username if there is no email address set<commit_after>from django.contrib.auth.models import AbstractUser
from django.db import models
class Person(AbstractUser):
description = models.TextField(blank=True)
def __str__(self):
if self.email:
return "User(<{}>)".format(self.email)
return "User(<{}>)".format(self.username)
|
38efa77f8831b2fcceb5f86f31a1ec7dc6aa5627 | src/odometry.py | src/odometry.py | #!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry
current_odometry = None
def get_odometry(message):
global current_odometry
current_odometry = message
if __name__ == '__main__':
rospy.init_node('odometry')
subscriber = rospy.Subscriber('odom', Odometry, get_odometry)
publisher = rospy.Publisher('odometry_10_hz', Odometry, queue_size=1)
while current_odometry == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_odometry)
rate.sleep()
| #!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Pose
current_pose = None
def get_pose(message):
global current_pose
current_pose = message.pose[0]
if __name__ == '__main__':
rospy.init_node('pose')
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
publisher = rospy.Publisher('pose_10_hz', Pose, queue_size=1)
while current_pose == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_pose)
rate.sleep()
| Change subscribed topic and message type | Change subscribed topic and message type
| Python | mit | bit0001/trajectory_tracking,bit0001/trajectory_tracking | #!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry
current_odometry = None
def get_odometry(message):
global current_odometry
current_odometry = message
if __name__ == '__main__':
rospy.init_node('odometry')
subscriber = rospy.Subscriber('odom', Odometry, get_odometry)
publisher = rospy.Publisher('odometry_10_hz', Odometry, queue_size=1)
while current_odometry == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_odometry)
rate.sleep()
Change subscribed topic and message type | #!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Pose
current_pose = None
def get_pose(message):
global current_pose
current_pose = message.pose[0]
if __name__ == '__main__':
rospy.init_node('pose')
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
publisher = rospy.Publisher('pose_10_hz', Pose, queue_size=1)
while current_pose == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_pose)
rate.sleep()
| <commit_before>#!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry
current_odometry = None
def get_odometry(message):
global current_odometry
current_odometry = message
if __name__ == '__main__':
rospy.init_node('odometry')
subscriber = rospy.Subscriber('odom', Odometry, get_odometry)
publisher = rospy.Publisher('odometry_10_hz', Odometry, queue_size=1)
while current_odometry == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_odometry)
rate.sleep()
<commit_msg>Change subscribed topic and message type<commit_after> | #!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Pose
current_pose = None
def get_pose(message):
global current_pose
current_pose = message.pose[0]
if __name__ == '__main__':
rospy.init_node('pose')
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
publisher = rospy.Publisher('pose_10_hz', Pose, queue_size=1)
while current_pose == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_pose)
rate.sleep()
| #!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry
current_odometry = None
def get_odometry(message):
global current_odometry
current_odometry = message
if __name__ == '__main__':
rospy.init_node('odometry')
subscriber = rospy.Subscriber('odom', Odometry, get_odometry)
publisher = rospy.Publisher('odometry_10_hz', Odometry, queue_size=1)
while current_odometry == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_odometry)
rate.sleep()
Change subscribed topic and message type#!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Pose
current_pose = None
def get_pose(message):
global current_pose
current_pose = message.pose[0]
if __name__ == '__main__':
rospy.init_node('pose')
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
publisher = rospy.Publisher('pose_10_hz', Pose, queue_size=1)
while current_pose == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_pose)
rate.sleep()
| <commit_before>#!/usr/bin/env python
import rospy
from nav_msgs.msg import Odometry
current_odometry = None
def get_odometry(message):
global current_odometry
current_odometry = message
if __name__ == '__main__':
rospy.init_node('odometry')
subscriber = rospy.Subscriber('odom', Odometry, get_odometry)
publisher = rospy.Publisher('odometry_10_hz', Odometry, queue_size=1)
while current_odometry == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_odometry)
rate.sleep()
<commit_msg>Change subscribed topic and message type<commit_after>#!/usr/bin/env python
import rospy
from gazebo_msgs.msg import ModelStates
from geometry_msgs.msg import Pose
current_pose = None
def get_pose(message):
global current_pose
current_pose = message.pose[0]
if __name__ == '__main__':
rospy.init_node('pose')
subscriber = rospy.Subscriber('gazebo/model_states', ModelStates, get_pose)
publisher = rospy.Publisher('pose_10_hz', Pose, queue_size=1)
while current_pose == None:
pass
rate = rospy.Rate(10)
while not rospy.is_shutdown():
publisher.publish(current_pose)
rate.sleep()
|
ea2922cabe441abcf0475445d94cffed0d0fa131 | kafka/kafkaConsumer.py | kafka/kafkaConsumer.py | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
#consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
#consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| Update IP address of kafka consumer | Update IP address of kafka consumer
| Python | apache-2.0 | opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha,opencord/voltha | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
#consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
Update IP address of kafka consumer | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
#consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| <commit_before>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
#consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
<commit_msg>Update IP address of kafka consumer<commit_after> | #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
#consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| #!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
#consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
Update IP address of kafka consumer#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
#consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
| <commit_before>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
#consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
<commit_msg>Update IP address of kafka consumer<commit_after>#!/usr/bin/env python
import threading, logging, time
from kafka import KafkaConsumer
class Consumer(threading.Thread):
daemon = True
def run(self):
consumer = KafkaConsumer(bootstrap_servers='10.100.198.220:9092',
#consumer = KafkaConsumer(bootstrap_servers='10.0.2.15:9092',
auto_offset_reset='earliest')
consumer.subscribe(['voltha-heartbeat'])
for message in consumer:
print (message)
def main():
threads = [
Consumer()
]
for t in threads:
t.start()
time.sleep(3000)
if __name__ == "__main__":
logging.basicConfig(
format='%(asctime)s.%(msecs)s:%(name)s:%(thread)d:%(levelname)s:%(process)d:%(message)s',
level=logging.INFO
)
main()
|
440b8424f11dc1f665bb512d30795c2bb6eda96e | mapentity/tests/models.py | mapentity/tests/models.py | from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
| from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
name = models.CharField(max_length=100, default='Empty')
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
| Add normal field to test model | Add normal field to test model
| Python | bsd-3-clause | Anaethelion/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity,Anaethelion/django-mapentity,makinacorpus/django-mapentity | from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
Add normal field to test model | from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
name = models.CharField(max_length=100, default='Empty')
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
| <commit_before>from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
<commit_msg>Add normal field to test model<commit_after> | from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
name = models.CharField(max_length=100, default='Empty')
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
| from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
Add normal field to test modelfrom django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
name = models.CharField(max_length=100, default='Empty')
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
| <commit_before>from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
<commit_msg>Add normal field to test model<commit_after>from django.db.models import loading
from django.contrib.gis.db import models
from django.contrib.gis.geos import GEOSGeometry
from mapentity.models import MapEntityMixin
class MushroomSpot(models.Model):
name = models.CharField(max_length=100, default='Empty')
serialized = models.CharField(max_length=200, null=True, default=None)
"""geom as python attribute"""
@property
def geom(self):
if self.serialized is None:
return None
return GEOSGeometry(self.serialized)
class WeatherStation(models.Model):
geom = models.PointField(null=True, default=None, srid=2154)
objects = models.GeoManager()
class DummyModel(MapEntityMixin, models.Model):
@classmethod
def get_jsonlist_url(self):
return ''
@classmethod
def get_generic_detail_url(self):
return ''
@classmethod
def get_add_url(self):
return ''
@classmethod
def get_update_url(self):
return ''
@classmethod
def get_delete_url(self):
return ''
loading.cache.loaded = False
|
e34c9ede88524b64b3a84d579718af6766a5e483 | bin/get_templates.py | bin/get_templates.py | #!/usr/bin/env python
import json
import os
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def print_struct(args):
new_, name, args = args
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
map(print_struct, structs)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
| Rename print_struct to generate_template and use multi_map | Rename print_struct to generate_template and use multi_map
| Python | mit | Tactique/game_engine,Tactique/game_engine | #!/usr/bin/env python
import json
import os
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def print_struct(args):
new_, name, args = args
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
map(print_struct, structs)
if __name__ == '__main__':
main()
Rename print_struct to generate_template and use multi_map | #!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import json
import os
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def print_struct(args):
new_, name, args = args
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
map(print_struct, structs)
if __name__ == '__main__':
main()
<commit_msg>Rename print_struct to generate_template and use multi_map<commit_after> | #!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import json
import os
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def print_struct(args):
new_, name, args = args
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
map(print_struct, structs)
if __name__ == '__main__':
main()
Rename print_struct to generate_template and use multi_map#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import json
import os
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def print_struct(args):
new_, name, args = args
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
map(print_struct, structs)
if __name__ == '__main__':
main()
<commit_msg>Rename print_struct to generate_template and use multi_map<commit_after>#!/usr/bin/env python
import json
import os
from lib.functional import multi_map
from engine import types, consts
template_dir = os.path.join(os.environ['PORTER'], 'templates')
structs = (
(types.new_unit, "Tank", (consts.RED,)),
(types.new_attack, "RegularCannon", ()),
(types.new_armor, "WeakMetal", ()),
(types.new_movement, "Treads", ()),
)
def without_trailing_whitespace(string):
def remove_trailing_whitespace(line):
return line.rstrip()
return '\n'.join(map(remove_trailing_whitespace, string.split('\n')))
def generate_template(new_, name, args):
with open(os.path.join(template_dir, '%s.json' % (name,)), 'w') as f:
f.write(
without_trailing_whitespace(
json.dumps(
json.loads(
repr(
new_(name, *args))),
indent=4)))
def main():
if not os.path.exists(template_dir):
os.mkdir(template_dir)
multi_map(generate_template, structs)
if __name__ == '__main__':
main()
|
a8fd3c484e0425168976bd55a687d2c632efa198 | lib/ansiblelint/formatters/__init__.py | lib/ansiblelint/formatters/__init__.py | class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: {2} {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| Add brackets around rule ID in parseable format. | Add brackets around rule ID in parseable format.
This formatter was supposed to model after the PEP8 format,
but was incorrect. The actualy format is:
"<filename>:<linenumber>: [<rule.id>] <message>"
| Python | mit | MatrixCrawler/ansible-lint,schlueter/ansible-lint,willthames/ansible-lint,MiLk/ansible-lint,charleswhchan/ansible-lint,dataxu/ansible-lint | class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: {2} {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
Add brackets around rule ID in parseable format.
This formatter was supposed to model after the PEP8 format,
but was incorrect. The actualy format is:
"<filename>:<linenumber>: [<rule.id>] <message>" | class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| <commit_before>class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: {2} {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
<commit_msg>Add brackets around rule ID in parseable format.
This formatter was supposed to model after the PEP8 format,
but was incorrect. The actualy format is:
"<filename>:<linenumber>: [<rule.id>] <message>"<commit_after> | class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: {2} {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
Add brackets around rule ID in parseable format.
This formatter was supposed to model after the PEP8 format,
but was incorrect. The actualy format is:
"<filename>:<linenumber>: [<rule.id>] <message>"class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
| <commit_before>class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: {2} {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
<commit_msg>Add brackets around rule ID in parseable format.
This formatter was supposed to model after the PEP8 format,
but was incorrect. The actualy format is:
"<filename>:<linenumber>: [<rule.id>] <message>"<commit_after>class Formatter:
def format(self, match):
formatstr = "[{0}] {1}\n{2}:{3}\n{4}\n"
return formatstr.format(match.rule.id,
match.message,
match.filename,
match.linenumber,
match.line)
class QuietFormatter:
def format(self, match):
formatstr = "[{0}] {1}:{2}"
return formatstr.format(match.rule.id, match.filename,
match.linenumber)
class ParseableFormatter:
def format(self, match):
formatstr = "{0}:{1}: [{2}] {3}"
return formatstr.format(match.filename,
match.linenumber,
match.rule.id,
match.message,
)
|
93abffd833498b4bae083bd70f3f154d9151c384 | src/coordinators/models.py | src/coordinators/models.py | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if user.is_superuser or not user.coordinator or user.coordinator.is_manager:
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| Fix OneToOneField instance check in filter_by_district | Fix OneToOneField instance check in filter_by_district
| Python | mit | mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign,mrts/foodbank-campaign | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if user.is_superuser or not user.coordinator or user.coordinator.is_manager:
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
Fix OneToOneField instance check in filter_by_district | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| <commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if user.is_superuser or not user.coordinator or user.coordinator.is_manager:
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
<commit_msg>Fix OneToOneField instance check in filter_by_district<commit_after> | from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if user.is_superuser or not user.coordinator or user.coordinator.is_manager:
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
Fix OneToOneField instance check in filter_by_districtfrom __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
| <commit_before>from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if user.is_superuser or not user.coordinator or user.coordinator.is_manager:
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
<commit_msg>Fix OneToOneField instance check in filter_by_district<commit_after>from __future__ import unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from locations.models import District
class Coordinator(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
is_manager = models.BooleanField()
district = models.ForeignKey(District, verbose_name=_('District'),
blank=True, null=True)
def filter_by_district(qs, user, lookup):
if (user.is_superuser
or not hasattr(user, 'coordinator')
or user.coordinator.is_manager):
return qs
kwargs = {
lookup: user.coordinator.district
}
return qs.filter(**kwargs)
|
53d41d629f0f34bbea3461a37cbe1c8f7f7e15f3 | toolkit/diary/edit_prefs.py | toolkit/diary/edit_prefs.py | import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': True}
PREF_MAP = {'false': False, 'true': True}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
| import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': 'true'}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
| Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool. | Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool.
| Python | agpl-3.0 | BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit | import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': True}
PREF_MAP = {'false': False, 'true': True}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool. | import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': 'true'}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
| <commit_before>import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': True}
PREF_MAP = {'false': False, 'true': True}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
<commit_msg>Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool.<commit_after> | import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': 'true'}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
| import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': True}
PREF_MAP = {'false': False, 'true': True}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool.import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': 'true'}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
| <commit_before>import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': True}
PREF_MAP = {'false': False, 'true': True}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
<commit_msg>Fix default behavior; popup window pref is (for now) stored as a string 'true'/'false', not a python Bool.<commit_after>import logging
logger = logging.getLogger(__name__)
KNOWN_PREFS = {'popups': 'true'}
def get_preferences(session):
edit_prefs = {}
for pref, default in KNOWN_PREFS.iteritems():
value = session.get('editpref_' + pref, default)
if value:
edit_prefs[pref] = value
return edit_prefs
def set_preferences(session, prefs_requested):
for pref in KNOWN_PREFS:
value = prefs_requested.get(pref, None)
if value:
value = str(value)[:10] # limit length of stored value
logger.debug("User set pref %s = %s", pref, value)
session['editpref_' + pref] = value
|
9af7c012c8ef2ca2999408abe98bdc3aa0ee1738 | base_partner_merge/__openerp__.py | base_partner_merge/__openerp__.py | {
'name': 'Base Partner Merge',
'author': 'OpenERP S.A.',
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
| {
'name': 'Base Partner Merge',
'author': "OpenERP S.A.,Odoo Community Association (OCA)",
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
| Add OCA as author of OCA addons | Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.
| Python | agpl-3.0 | microcom/partner-contact,brain-tec/partner-contact,microcom/partner-contact,brain-tec/partner-contact | {
'name': 'Base Partner Merge',
'author': 'OpenERP S.A.',
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association. | {
'name': 'Base Partner Merge',
'author': "OpenERP S.A.,Odoo Community Association (OCA)",
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
| <commit_before>{
'name': 'Base Partner Merge',
'author': 'OpenERP S.A.',
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
<commit_msg>Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.<commit_after> | {
'name': 'Base Partner Merge',
'author': "OpenERP S.A.,Odoo Community Association (OCA)",
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
| {
'name': 'Base Partner Merge',
'author': 'OpenERP S.A.',
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.{
'name': 'Base Partner Merge',
'author': "OpenERP S.A.,Odoo Community Association (OCA)",
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
| <commit_before>{
'name': 'Base Partner Merge',
'author': 'OpenERP S.A.',
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
<commit_msg>Add OCA as author of OCA addons
In order to get visibility on https://www.odoo.com/apps the OCA board has
decided to add the OCA as author of all the addons maintained as part of the
association.<commit_after>{
'name': 'Base Partner Merge',
'author': "OpenERP S.A.,Odoo Community Association (OCA)",
'category': 'Generic Modules/Base',
'version': '0.1',
'description': """
backport module, to be removed when we switch to saas2 on the private servers
""",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'base_partner_merge_view.xml',
],
'installable': True,
}
|
9dc6de1a97c18fa03787349ed64c1a4100b5d170 | datapackage_pipelines_od4tj/processors/fix-numbers.py | datapackage_pipelines_od4tj/processors/fix-numbers.py | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) | Fix bad indentation in processor | Fix bad indentation in processor
| Python | mit | okfn/datapackage_pipelines_od4tj | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row)Fix bad indentation in processor | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) | <commit_before>from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row)<commit_msg>Fix bad indentation in processor<commit_after> | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) | from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row)Fix bad indentation in processorfrom datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) | <commit_before>from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row)<commit_msg>Fix bad indentation in processor<commit_after>from datapackage_pipelines.wrapper import process
def process_row(row, row_index,
spec, resource_index,
parameters, stats):
for f in spec['schema']['fields']:
if 'factor' in f:
factor = {
'1m': 1000000
}[f['factor']]
v = row[f['name']]
if v:
row[f['name']] = v * factor
return row
process(process_row=process_row) |
6cb9b552f30cd25b9266677fd2c13140697e2f20 | thinglang/foundation/templates.py | thinglang/foundation/templates.py | HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile().capitalize()
return '{}Type'.format(name), '{}Instance'.format(name)
| HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile()
formatted_name = name[0].upper() + name[1:]
return '{}Type'.format(formatted_name), '{}Instance'.format(formatted_name)
| Fix capitalization method in class_names | Fix capitalization method in class_names
| Python | mit | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile().capitalize()
return '{}Type'.format(name), '{}Instance'.format(name)
Fix capitalization method in class_names | HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile()
formatted_name = name[0].upper() + name[1:]
return '{}Type'.format(formatted_name), '{}Instance'.format(formatted_name)
| <commit_before>HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile().capitalize()
return '{}Type'.format(name), '{}Instance'.format(name)
<commit_msg>Fix capitalization method in class_names<commit_after> | HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile()
formatted_name = name[0].upper() + name[1:]
return '{}Type'.format(formatted_name), '{}Instance'.format(formatted_name)
| HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile().capitalize()
return '{}Type'.format(name), '{}Instance'.format(name)
Fix capitalization method in class_namesHEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile()
formatted_name = name[0].upper() + name[1:]
return '{}Type'.format(formatted_name), '{}Instance'.format(formatted_name)
| <commit_before>HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile().capitalize()
return '{}Type'.format(name), '{}Instance'.format(name)
<commit_msg>Fix capitalization method in class_names<commit_after>HEADER = """/**
{file_name}
Auto-generated code - do not modify.
thinglang C++ transpiler, 0.0.0
**/
"""
FOUNDATION_ENUM = HEADER + """
#pragma once
#include <string>
{imports}
enum class {name} {{
{values}
}};
"""
FOUNDATION_SWITCH = """
inline auto {func_name}({name} val){{
switch (val){{
{cases}
}}
}}
"""
ENUM_CASE = """
case {enum_class}::{name}:
return {value};"""
TYPE_INSTANTIATION = HEADER + """
{imports}
enum PrimitiveType {{
{primitives}
}};
inline Type create_type(const std::string& type_name){{
{conditions}
throw RuntimeError("Unknown type name: " + type_name);
}}
"""
TYPE_CONDITIONAL = """if(type_name == "{name}") return new {cls_name}();"""
def class_names(name):
name = name.transpile()
formatted_name = name[0].upper() + name[1:]
return '{}Type'.format(formatted_name), '{}Instance'.format(formatted_name)
|
2752c9880934aed1f02ab5e9cc111b07cb449c46 | async_messages/middleware.py | async_messages/middleware.py | from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_request(self, request):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
| from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_response(self, request, response):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
| Add the message during the processing of the response. | Add the message during the processing of the response. | Python | mit | codeinthehole/django-async-messages | from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_request(self, request):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
Add the message during the processing of the response. | from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_response(self, request, response):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
| <commit_before>from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_request(self, request):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
<commit_msg>Add the message during the processing of the response.<commit_after> | from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_response(self, request, response):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
| from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_request(self, request):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
Add the message during the processing of the response.from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_response(self, request, response):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
| <commit_before>from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_request(self, request):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
<commit_msg>Add the message during the processing of the response.<commit_after>from django.contrib import messages
from async_messages import get_message
class AsyncMiddleware(object):
def process_response(self, request, response):
# Check for message for this user and, if it exists,
# call the messages API with it
if not request.user.is_authenticated():
return
msg, level = get_message(request.user)
if msg:
messages.add_message(request, level, msg)
|
68b07eb2a49c56b696ab4d95b32a70a371f9d0f8 | elevator_cli/io.py | elevator_cli/io.py | # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command, args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| Update : uppercase every input commands | Update : uppercase every input commands
| Python | mit | oleiade/Elevator | # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command, args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
Update : uppercase every input commands | # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command, args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
<commit_msg>Update : uppercase every input commands<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| # -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command, args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
Update : uppercase every input commands# -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command, args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
<commit_msg>Update : uppercase every input commands<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2012 theo crevon
#
# See the file LICENSE for copying permission.
from clint.textui import puts, colored
from elevator.utils.patterns import destructurate
from .helpers import FAILURE_STATUS
def prompt(*args, **kwargs):
current_db = kwargs.pop('current_db', 'default')
if current_db:
pattern = '@ Elevator.{db} => '.format(db=current_db)
else:
pattern = '! Offline => '
input_str = raw_input(pattern)
return input_str
def parse_input(input_str, *args, **kwargs):
input_str = input_str.strip().split()
command, args = destructurate(input_str)
return command.upper(), args
def output_result(status, result, *args, **kwargs):
if result:
if status == FAILURE_STATUS:
puts(colored.red(str(result)))
else:
puts(str(result))
|
eb9b1cc747dc807a52ee7d0dec0992eb70005840 | cacao_app/configuracion/models.py | cacao_app/configuracion/models.py | # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
| # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
| Set help text for app logo | Set help text for app logo
| Python | bsd-3-clause | CacaoMovil/guia-de-cacao-django,CacaoMovil/guia-de-cacao-django,CacaoMovil/guia-de-cacao-django | # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
Set help text for app logo | # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
| <commit_before># -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
<commit_msg>Set help text for app logo<commit_after> | # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
| # -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
Set help text for app logo# -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
| <commit_before># -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField('Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField('Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen', upload_to='cacao/')
class Meta:
verbose_name = "Configuracion de Aplicación"
<commit_msg>Set help text for app logo<commit_after># -*- coding: utf-8 -*-
from django.db import models
from solo.models import SingletonModel
from ckeditor.fields import RichTextField
class Contacto(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_contacto = RichTextField(
'Informacion de Contacto', config_name='default')
contacto_general = RichTextField('Contacto General', config_name='default')
class Meta:
verbose_name = "Configuracion Contacto"
class Acerca(SingletonModel):
"""
This model store the Contacto object
but this only have one instance
"""
informacion_bienvenida = RichTextField(
'Informacion de Bienvenida', config_name='default')
class Meta:
verbose_name = "Configuracion de \"Acerca de\""
class Application(SingletonModel):
title = models.CharField('Titulo', max_length=250)
sub_title = models.CharField('Sub Titulo', max_length=250)
logo = models.ImageField('Imagen',
upload_to='cacao/',
help_text="Formato PNG transparente y 512x512 pixels de tamaño") # noqa
class Meta:
verbose_name = "Configuracion de Aplicación"
|
12bcc60fff5119e95fb1de593cc0c5e6ab8294ea | changes/api/jobstep_deallocate.py | changes/api/jobstep_deallocate.py | from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| Revert "Allow running jobsteps to be deallocated" | Revert "Allow running jobsteps to be deallocated"
This reverts commit 9b720026722ce92a8c0e05aa041d6e861c5e4e82.
| Python | apache-2.0 | wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes | from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
Revert "Allow running jobsteps to be deallocated"
This reverts commit 9b720026722ce92a8c0e05aa041d6e861c5e4e82. | from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| <commit_before>from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
<commit_msg>Revert "Allow running jobsteps to be deallocated"
This reverts commit 9b720026722ce92a8c0e05aa041d6e861c5e4e82.<commit_after> | from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
Revert "Allow running jobsteps to be deallocated"
This reverts commit 9b720026722ce92a8c0e05aa041d6e861c5e4e82.from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
| <commit_before>from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status not in (Status.in_progress, Status.allocated):
return {
"error": "Only allocated and running job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
<commit_msg>Revert "Allow running jobsteps to be deallocated"
This reverts commit 9b720026722ce92a8c0e05aa041d6e861c5e4e82.<commit_after>from __future__ import absolute_import, division, unicode_literals
from changes.api.base import APIView
from changes.constants import Status
from changes.config import db
from changes.jobs.sync_job_step import sync_job_step
from changes.models import JobStep
class JobStepDeallocateAPIView(APIView):
def post(self, step_id):
to_deallocate = JobStep.query.get(step_id)
if to_deallocate is None:
return '', 404
if to_deallocate.status != Status.allocated:
return {
"error": "Only {0} job steps may be deallocated.",
"actual_status": to_deallocate.status.name
}, 400
to_deallocate.status = Status.pending_allocation
to_deallocate.date_started = None
to_deallocate.date_finished = None
db.session.add(to_deallocate)
db.session.commit()
sync_job_step.delay(
step_id=to_deallocate.id.hex,
task_id=to_deallocate.id.hex,
parent_task_id=to_deallocate.job_id.hex,
)
return self.respond(to_deallocate)
|
c3745e7017c1788f4633d09ef4d29a37018b53d3 | populus/cli/main.py | populus/cli/main.py | import click
@click.group()
def main():
"""
Populus
"""
pass
| import click
CONTEXT_SETTINGS = dict(
# Support -h as a shortcut for --help
help_option_names=['-h', '--help'],
)
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Populus
"""
pass
| Support -h as a shortcut for --help | CLI: Support -h as a shortcut for --help
| Python | mit | pipermerriam/populus,euri10/populus,euri10/populus,pipermerriam/populus,euri10/populus | import click
@click.group()
def main():
"""
Populus
"""
pass
CLI: Support -h as a shortcut for --help | import click
CONTEXT_SETTINGS = dict(
# Support -h as a shortcut for --help
help_option_names=['-h', '--help'],
)
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Populus
"""
pass
| <commit_before>import click
@click.group()
def main():
"""
Populus
"""
pass
<commit_msg>CLI: Support -h as a shortcut for --help<commit_after> | import click
CONTEXT_SETTINGS = dict(
# Support -h as a shortcut for --help
help_option_names=['-h', '--help'],
)
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Populus
"""
pass
| import click
@click.group()
def main():
"""
Populus
"""
pass
CLI: Support -h as a shortcut for --helpimport click
CONTEXT_SETTINGS = dict(
# Support -h as a shortcut for --help
help_option_names=['-h', '--help'],
)
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Populus
"""
pass
| <commit_before>import click
@click.group()
def main():
"""
Populus
"""
pass
<commit_msg>CLI: Support -h as a shortcut for --help<commit_after>import click
CONTEXT_SETTINGS = dict(
# Support -h as a shortcut for --help
help_option_names=['-h', '--help'],
)
@click.group(context_settings=CONTEXT_SETTINGS)
def main():
"""
Populus
"""
pass
|
35e54a2fa4408aff70989437554cfe1ee2318799 | test_utils/views.py | test_utils/views.py | from django.http import HttpResponse
import logging
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
| from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
filename = slugify(filename)
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
| Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils! | Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils! | Python | mit | ericholscher/django-test-utils,frac/django-test-utils,acdha/django-test-utils,frac/django-test-utils,ericholscher/django-test-utils,acdha/django-test-utils | from django.http import HttpResponse
import logging
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils! | from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
filename = slugify(filename)
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
| <commit_before>from django.http import HttpResponse
import logging
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
<commit_msg>Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils!<commit_after> | from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
filename = slugify(filename)
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
| from django.http import HttpResponse
import logging
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils!from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
filename = slugify(filename)
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
| <commit_before>from django.http import HttpResponse
import logging
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
<commit_msg>Use slugify to filenameify the strings passed in. This should probably live in a test_utils.utils. Too many utils!<commit_after>from django.http import HttpResponse
import logging
from test_utils.testmaker.processors.base import slugify
from test_utils.testmaker import Testmaker
def set_logging(request, filename=None):
if not filename:
filename = request.REQUEST['filename']
filename = slugify(filename)
log_file = '/tmp/testmaker/tests/%s_tests_custom.py' % filename
serialize_file = '/tmp/testmaker/tests/%s_serial_custm.py' % filename
tm = Testmaker()
tm.setup_logging(test_file=log_file, serialize_file=serialize_file)
tm.prepare_test_file()
return HttpResponse('Setup logging %s' % tm.test_file)
def show_log(request):
file = Testmaker.logfile()
contents = open(file)
return HttpResponse(contents.read(), content_type='text/plain')
HttpResponse()
|
955cb0d27ab52348b753c3edea731223e2631f50 | Climate_Police/tests/test_plot_pollutants.py | Climate_Police/tests/test_plot_pollutants.py | #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main() | #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
| Add flag to plot_pollutant unit test | Add flag to plot_pollutant unit test
also change assertTrue to assertEqual | Python | mit | abhisheksugam/Climate_Police | #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main()Add flag to plot_pollutant unit test
also change assertTrue to assertEqual | #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
| <commit_before>#run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main()<commit_msg>Add flag to plot_pollutant unit test
also change assertTrue to assertEqual<commit_after> | #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
| #run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main()Add flag to plot_pollutant unit test
also change assertTrue to assertEqual#run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
| <commit_before>#run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main()<commit_msg>Add flag to plot_pollutant unit test
also change assertTrue to assertEqual<commit_after>#run the test with default values of df, state and year
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
|
b715a0fdc6db68f3c0ae30f1ff09e1aa8bb94524 | Wrappers/Phenix/Xtriage.py | Wrappers/Phenix/Xtriage.py | #!/usr/bin/env python
# Xtriage.py
# Copyright (C) 2017 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
if __name__ == "__main__":
import sys
assert len(sys.argv[1:]) == 1
xtriage = Xtriage()
xtriage.set_mtz(sys.argv[1])
xtriage.run()
print("".join(xtriage.get_all_output()))
| #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
| Clean __main__ + header boiler plate | Clean __main__ + header boiler plate
Then flake8 warnings re: os / sys etc.
| Python | bsd-3-clause | xia2/xia2,xia2/xia2 | #!/usr/bin/env python
# Xtriage.py
# Copyright (C) 2017 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
if __name__ == "__main__":
import sys
assert len(sys.argv[1:]) == 1
xtriage = Xtriage()
xtriage.set_mtz(sys.argv[1])
xtriage.run()
print("".join(xtriage.get_all_output()))
Clean __main__ + header boiler plate
Then flake8 warnings re: os / sys etc. | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
| <commit_before>#!/usr/bin/env python
# Xtriage.py
# Copyright (C) 2017 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
if __name__ == "__main__":
import sys
assert len(sys.argv[1:]) == 1
xtriage = Xtriage()
xtriage.set_mtz(sys.argv[1])
xtriage.run()
print("".join(xtriage.get_all_output()))
<commit_msg>Clean __main__ + header boiler plate
Then flake8 warnings re: os / sys etc.<commit_after> | #!/usr/bin/env python
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
| #!/usr/bin/env python
# Xtriage.py
# Copyright (C) 2017 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
if __name__ == "__main__":
import sys
assert len(sys.argv[1:]) == 1
xtriage = Xtriage()
xtriage.set_mtz(sys.argv[1])
xtriage.run()
print("".join(xtriage.get_all_output()))
Clean __main__ + header boiler plate
Then flake8 warnings re: os / sys etc.#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
| <commit_before>#!/usr/bin/env python
# Xtriage.py
# Copyright (C) 2017 Diamond Light Source, Richard Gildea
#
# This code is distributed under the BSD license, a copy of which is
# included in the root directory of this package.
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
if __name__ == "__main__":
import sys
assert len(sys.argv[1:]) == 1
xtriage = Xtriage()
xtriage.set_mtz(sys.argv[1])
xtriage.run()
print("".join(xtriage.get_all_output()))
<commit_msg>Clean __main__ + header boiler plate
Then flake8 warnings re: os / sys etc.<commit_after>#!/usr/bin/env python
from __future__ import absolute_import, division, print_function
from xia2.Driver.DriverFactory import DriverFactory
def Xtriage(DriverType=None):
"""A factory for the Xtriage wrappers."""
DriverInstance = DriverFactory.Driver("simple")
class XtriageWrapper(DriverInstance.__class__):
"""A wrapper class for phenix.xtriage."""
def __init__(self):
DriverInstance.__class__.__init__(self)
self.set_executable("mmtbx.xtriage")
self._mtz = None
return
def set_mtz(self, mtz):
self._mtz = mtz
return
def run(self):
import os
assert self._mtz is not None
assert os.path.isfile(self._mtz)
self.add_command_line(self._mtz)
self.start()
self.close_wait()
self.check_for_errors()
return XtriageWrapper()
|
5a5e1b528265eb7b7d901566d9afcba52a4cec78 | mothermayi/entryway.py | mothermayi/entryway.py | import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
LOGGER.debug("Loaded plugin %s", plugin['name'])
if plugin['name'] in PLUGINS:
raise Exception("Already have a plugin with the name {}, cannot overwrite".format(plugin['name']))
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
| import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
plugin['dist'] = entry.dist
LOGGER.debug("Loaded plugin %s from %s", plugin['name'], entry.dist)
existing_plugin = PLUGINS.get(plugin['name'], None)
if existing_plugin and existing_plugin['dist'] != plugin['dist']:
LOGGER.warning("Overwriting plugin %s with %s", existing_plugin, plugin)
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
| Raise a warning instead of failing on plugin overwrite | Raise a warning instead of failing on plugin overwrite
I'm not sure why but my local developer versions of plugins are
registering more than once with different names. I think it might be
because I have a difference between the package name and the module
name. Not sure. Anyways, I'm going to change this for now with a nice
large warning so that I can keep an eye on it and keep developing
| Python | mit | EliRibble/mothermayi | import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
LOGGER.debug("Loaded plugin %s", plugin['name'])
if plugin['name'] in PLUGINS:
raise Exception("Already have a plugin with the name {}, cannot overwrite".format(plugin['name']))
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
Raise a warning instead of failing on plugin overwrite
I'm not sure why but my local developer versions of plugins are
registering more than once with different names. I think it might be
because I have a difference between the package name and the module
name. Not sure. Anyways, I'm going to change this for now with a nice
large warning so that I can keep an eye on it and keep developing | import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
plugin['dist'] = entry.dist
LOGGER.debug("Loaded plugin %s from %s", plugin['name'], entry.dist)
existing_plugin = PLUGINS.get(plugin['name'], None)
if existing_plugin and existing_plugin['dist'] != plugin['dist']:
LOGGER.warning("Overwriting plugin %s with %s", existing_plugin, plugin)
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
| <commit_before>import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
LOGGER.debug("Loaded plugin %s", plugin['name'])
if plugin['name'] in PLUGINS:
raise Exception("Already have a plugin with the name {}, cannot overwrite".format(plugin['name']))
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
<commit_msg>Raise a warning instead of failing on plugin overwrite
I'm not sure why but my local developer versions of plugins are
registering more than once with different names. I think it might be
because I have a difference between the package name and the module
name. Not sure. Anyways, I'm going to change this for now with a nice
large warning so that I can keep an eye on it and keep developing<commit_after> | import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
plugin['dist'] = entry.dist
LOGGER.debug("Loaded plugin %s from %s", plugin['name'], entry.dist)
existing_plugin = PLUGINS.get(plugin['name'], None)
if existing_plugin and existing_plugin['dist'] != plugin['dist']:
LOGGER.warning("Overwriting plugin %s with %s", existing_plugin, plugin)
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
| import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
LOGGER.debug("Loaded plugin %s", plugin['name'])
if plugin['name'] in PLUGINS:
raise Exception("Already have a plugin with the name {}, cannot overwrite".format(plugin['name']))
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
Raise a warning instead of failing on plugin overwrite
I'm not sure why but my local developer versions of plugins are
registering more than once with different names. I think it might be
because I have a difference between the package name and the module
name. Not sure. Anyways, I'm going to change this for now with a nice
large warning so that I can keep an eye on it and keep developingimport logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
plugin['dist'] = entry.dist
LOGGER.debug("Loaded plugin %s from %s", plugin['name'], entry.dist)
existing_plugin = PLUGINS.get(plugin['name'], None)
if existing_plugin and existing_plugin['dist'] != plugin['dist']:
LOGGER.warning("Overwriting plugin %s with %s", existing_plugin, plugin)
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
| <commit_before>import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
LOGGER.debug("Loaded plugin %s", plugin['name'])
if plugin['name'] in PLUGINS:
raise Exception("Already have a plugin with the name {}, cannot overwrite".format(plugin['name']))
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
<commit_msg>Raise a warning instead of failing on plugin overwrite
I'm not sure why but my local developer versions of plugins are
registering more than once with different names. I think it might be
because I have a difference between the package name and the module
name. Not sure. Anyways, I'm going to change this for now with a nice
large warning so that I can keep an eye on it and keep developing<commit_after>import logging
import pkg_resources
LOGGER = logging.getLogger(__name__)
PLUGINS = {}
def load():
for entry in pkg_resources.iter_entry_points(group='mothermayi'):
runner = entry.load()
plugin = runner()
plugin['dist'] = entry.dist
LOGGER.debug("Loaded plugin %s from %s", plugin['name'], entry.dist)
existing_plugin = PLUGINS.get(plugin['name'], None)
if existing_plugin and existing_plugin['dist'] != plugin['dist']:
LOGGER.warning("Overwriting plugin %s with %s", existing_plugin, plugin)
PLUGINS[plugin['name']] = plugin
def get_plugins(name):
return {k: v for k, v in PLUGINS.items() if name in v}
|
067c9be6c9e362a9a902f3233e1ae0b2643d405f | src/sequences/io/trend/price/macd/s_chunk.py | src/sequences/io/trend/price/macd/s_chunk.py | '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(input):
python_list = json.loads(input)
chunks = macd_chunk(python_list, getter=lambda x: x[u'close'])
return json.dumps(chunks)
| '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(data, pricetype=u'close'):
python_list = json.loads(data)
chunks = macd_chunk(python_list, getter=lambda x: x[pricetype])
return json.dumps(chunks)
| Handle two arguments while converting to JSON | Feature: Handle two arguments while converting to JSON | Python | mpl-2.0 | Skalman/owl_analytics | '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(input):
python_list = json.loads(input)
chunks = macd_chunk(python_list, getter=lambda x: x[u'close'])
return json.dumps(chunks)
Feature: Handle two arguments while converting to JSON | '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(data, pricetype=u'close'):
python_list = json.loads(data)
chunks = macd_chunk(python_list, getter=lambda x: x[pricetype])
return json.dumps(chunks)
| <commit_before>'''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(input):
python_list = json.loads(input)
chunks = macd_chunk(python_list, getter=lambda x: x[u'close'])
return json.dumps(chunks)
<commit_msg>Feature: Handle two arguments while converting to JSON<commit_after> | '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(data, pricetype=u'close'):
python_list = json.loads(data)
chunks = macd_chunk(python_list, getter=lambda x: x[pricetype])
return json.dumps(chunks)
| '''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(input):
python_list = json.loads(input)
chunks = macd_chunk(python_list, getter=lambda x: x[u'close'])
return json.dumps(chunks)
Feature: Handle two arguments while converting to JSON'''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(data, pricetype=u'close'):
python_list = json.loads(data)
chunks = macd_chunk(python_list, getter=lambda x: x[pricetype])
return json.dumps(chunks)
| <commit_before>'''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(input):
python_list = json.loads(input)
chunks = macd_chunk(python_list, getter=lambda x: x[u'close'])
return json.dumps(chunks)
<commit_msg>Feature: Handle two arguments while converting to JSON<commit_after>'''
Sequential Price-Trend Models: MACD with Price-Bar-Chunk (MDC)
'''
import numpy
import json
from techmodels.overlays.trend.price.chunk import sign_chunker
from techmodels.indicators.trend.price.macd import MACDIndicator
def macd_chunk(data, nfast=10, nslow=35, nema=5, getter=lambda x: x):
prices = numpy.array(map(getter, data))
macd_oscillator = MACDIndicator(prices, nfast, nslow, nema)
# Skip the first nfast values
cropped_indicator = macd_oscillator.indicator()[nfast + 1:]
cropped_data = data[nfast + 1:]
chunks = sign_chunker(cropped_indicator, cropped_data)
return chunks
def macd_chunk_json(data, pricetype=u'close'):
python_list = json.loads(data)
chunks = macd_chunk(python_list, getter=lambda x: x[pricetype])
return json.dumps(chunks)
|
179365edffa9333afaed71568d89ab5c57607c42 | vinepy/utils.py | vinepy/utils.py | from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
| from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
VINE_HASHING_KEY = 'BuzaW7ZmKAqbhMOei5J1nvr6gXHwdpDjITtFUPxQ20E9VY3Ll'
index_key_dict = dict([(char, index) for index, char in enumerate(VINE_HASHING_KEY)])
def post_long_id(short_id):
prepared_hash = enumerate(short_id[::-1])
long_id = reduce(lambda acc, (index, key): acc + index_key_dict[key] * len(VINE_HASHING_KEY)**index, prepared_hash, 0)
return long_id
def post_short_id(long_id):
id_fragments = int2base(long_id, len(VINE_HASHING_KEY))
short_id_fragments = map(lambda fragment: VINE_HASHING_KEY[fragment], id_fragments)
return ''.join(short_id_fragments)
def int2base(x, base):
if x < 0: sign = -1
elif x == 0: return 0
else: sign = 1
x *= sign
digits = []
while x:
digits.append(x % base)
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return digits
| Add util to convert between short and long postId formats | Add util to convert between short and long postId formats
| Python | mit | davoclavo/vinepy | from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
Add util to convert between short and long postId formats | from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
VINE_HASHING_KEY = 'BuzaW7ZmKAqbhMOei5J1nvr6gXHwdpDjITtFUPxQ20E9VY3Ll'
index_key_dict = dict([(char, index) for index, char in enumerate(VINE_HASHING_KEY)])
def post_long_id(short_id):
prepared_hash = enumerate(short_id[::-1])
long_id = reduce(lambda acc, (index, key): acc + index_key_dict[key] * len(VINE_HASHING_KEY)**index, prepared_hash, 0)
return long_id
def post_short_id(long_id):
id_fragments = int2base(long_id, len(VINE_HASHING_KEY))
short_id_fragments = map(lambda fragment: VINE_HASHING_KEY[fragment], id_fragments)
return ''.join(short_id_fragments)
def int2base(x, base):
if x < 0: sign = -1
elif x == 0: return 0
else: sign = 1
x *= sign
digits = []
while x:
digits.append(x % base)
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return digits
| <commit_before>from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
<commit_msg>Add util to convert between short and long postId formats<commit_after> | from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
VINE_HASHING_KEY = 'BuzaW7ZmKAqbhMOei5J1nvr6gXHwdpDjITtFUPxQ20E9VY3Ll'
index_key_dict = dict([(char, index) for index, char in enumerate(VINE_HASHING_KEY)])
def post_long_id(short_id):
prepared_hash = enumerate(short_id[::-1])
long_id = reduce(lambda acc, (index, key): acc + index_key_dict[key] * len(VINE_HASHING_KEY)**index, prepared_hash, 0)
return long_id
def post_short_id(long_id):
id_fragments = int2base(long_id, len(VINE_HASHING_KEY))
short_id_fragments = map(lambda fragment: VINE_HASHING_KEY[fragment], id_fragments)
return ''.join(short_id_fragments)
def int2base(x, base):
if x < 0: sign = -1
elif x == 0: return 0
else: sign = 1
x *= sign
digits = []
while x:
digits.append(x % base)
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return digits
| from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
Add util to convert between short and long postId formatsfrom datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
VINE_HASHING_KEY = 'BuzaW7ZmKAqbhMOei5J1nvr6gXHwdpDjITtFUPxQ20E9VY3Ll'
index_key_dict = dict([(char, index) for index, char in enumerate(VINE_HASHING_KEY)])
def post_long_id(short_id):
prepared_hash = enumerate(short_id[::-1])
long_id = reduce(lambda acc, (index, key): acc + index_key_dict[key] * len(VINE_HASHING_KEY)**index, prepared_hash, 0)
return long_id
def post_short_id(long_id):
id_fragments = int2base(long_id, len(VINE_HASHING_KEY))
short_id_fragments = map(lambda fragment: VINE_HASHING_KEY[fragment], id_fragments)
return ''.join(short_id_fragments)
def int2base(x, base):
if x < 0: sign = -1
elif x == 0: return 0
else: sign = 1
x *= sign
digits = []
while x:
digits.append(x % base)
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return digits
| <commit_before>from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
<commit_msg>Add util to convert between short and long postId formats<commit_after>from datetime import datetime
def strptime(string, fmt='%Y-%m-%dT%H:%M:%S.%f'):
return datetime.strptime(string, fmt)
# From http://stackoverflow.com/a/14620633
# CAUTION: it causes memory leak in < 2.7.3 and < 3.2.3
class AttrDict(dict):
def __init__(self, *args, **kwargs):
super(AttrDict, self).__init__(*args, **kwargs)
self.__dict__ = self
VINE_HASHING_KEY = 'BuzaW7ZmKAqbhMOei5J1nvr6gXHwdpDjITtFUPxQ20E9VY3Ll'
index_key_dict = dict([(char, index) for index, char in enumerate(VINE_HASHING_KEY)])
def post_long_id(short_id):
prepared_hash = enumerate(short_id[::-1])
long_id = reduce(lambda acc, (index, key): acc + index_key_dict[key] * len(VINE_HASHING_KEY)**index, prepared_hash, 0)
return long_id
def post_short_id(long_id):
id_fragments = int2base(long_id, len(VINE_HASHING_KEY))
short_id_fragments = map(lambda fragment: VINE_HASHING_KEY[fragment], id_fragments)
return ''.join(short_id_fragments)
def int2base(x, base):
if x < 0: sign = -1
elif x == 0: return 0
else: sign = 1
x *= sign
digits = []
while x:
digits.append(x % base)
x /= base
if sign < 0:
digits.append('-')
digits.reverse()
return digits
|
2565df456ecb290f620ce4dadca19c76b0eeb1af | widgets/card.py | widgets/card.py | # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, **kwargs):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, **kwargs)
| # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, detailed=False, small=False):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
| Fix a bug in caching | Fix a bug in caching
| Python | apache-2.0 | teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr,teampopong/pokr.kr | # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, **kwargs):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, **kwargs)
Fix a bug in caching | # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, detailed=False, small=False):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
| <commit_before># -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, **kwargs):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, **kwargs)
<commit_msg>Fix a bug in caching<commit_after> | # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, detailed=False, small=False):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
| # -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, **kwargs):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, **kwargs)
Fix a bug in caching# -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, detailed=False, small=False):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
| <commit_before># -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, **kwargs):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, **kwargs)
<commit_msg>Fix a bug in caching<commit_after># -*- coding: utf-8 -*-
from flask import render_template
from cache import cache
from models.person import Person
@cache.memoize(24 * 60 * 60)
def card(person_or_id, detailed=False, small=False):
if isinstance(person_or_id, Person):
person = person_or_id
else:
person = Person.query.filter_by(id=person_or_id).first()
return render_template('widgets/card.html', person=person, detailed=detailed, small=small)
|
4320eecc294fa1233a2ad7b4cdec1e2dc1e83b37 | testing/test_simbad.py | testing/test_simbad.py | import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| Remove real database during testing | Remove real database during testing
| Python | mit | mindriot101/k2catalogue | import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
Remove real database during testing | import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| <commit_before>import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
<commit_msg>Remove real database during testing<commit_after> | import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
Remove real database during testingimport pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
| <commit_before>import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return session.query(EPIC).filter(EPIC.epic_id == 201763507).first()
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '169.18 4.72'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
<commit_msg>Remove real database during testing<commit_after>import pytest
import vcr
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue.models import EPIC, create_session
from k2catalogue.simbad import Simbad
@pytest.fixture
def session():
return create_session()
@pytest.fixture
def epic(session):
return mock.Mock(ra=123.456, dec=-56.789)
@pytest.fixture
def simbad(epic):
return Simbad(epic)
@pytest.fixture
def form_data(simbad):
return simbad.form_data(radius=5.)
@vcr.use_cassette('.cassettes/response.yml')
@pytest.fixture
def response(simbad):
return simbad.send_request()
def test_form_data(form_data):
assert form_data['Coord'] == '123.46 -56.79'
def test_response(response):
assert response.status_code == 200
def test_open(simbad):
with mock.patch('k2catalogue.simbad.webbrowser.open') as mock_open:
simbad.open(radius=10)
url, = mock_open.call_args[0]
assert 'file://' in url and 'html' in url
|
05ab2efe19ada626b254ea019d6f2fb0a16a923b | clinvoc/examples/parser_example.py | clinvoc/examples/parser_example.py | from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(codeset)
| from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(sorted(codeset))
| Sort codes in ICD 9 parsing example | Sort codes in ICD 9 parsing example
| Python | mit | jcrudy/clinvoc | from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(codeset)
Sort codes in ICD 9 parsing example | from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(sorted(codeset))
| <commit_before>from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(codeset)
<commit_msg>Sort codes in ICD 9 parsing example<commit_after> | from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(sorted(codeset))
| from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(codeset)
Sort codes in ICD 9 parsing examplefrom clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(sorted(codeset))
| <commit_before>from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(codeset)
<commit_msg>Sort codes in ICD 9 parsing example<commit_after>from clinvoc.icd9 import ICD9CM
# This string describes a set of ICD 9 codes
codestring = '745.0-745.3, 745.6*, 746, 747.1-747.49, 747.81, 747.89, 35.8, 35.81, 35.82, 35.83, 35.84'
# Use clinvoc to parse and standardize the above codes
vocab = ICD9CM()
codeset = vocab.parse(codestring)
print(sorted(codeset))
|
3152e485bb8ee07489c1fb472652934b7dfec540 | tests/test_clone.py | tests/test_clone.py | import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
| import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
| Use unabbreviated --quiet-git option in clone test | Use unabbreviated --quiet-git option in clone test
| Python | isc | ustuehler/git-cvs,ustuehler/git-cvs | import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
Use unabbreviated --quiet-git option in clone test | import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
<commit_msg>Use unabbreviated --quiet-git option in clone test<commit_after> | import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
| import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
Use unabbreviated --quiet-git option in clone testimport unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
| <commit_before>import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
<commit_msg>Use unabbreviated --quiet-git option in clone test<commit_after>import unittest
from cvsgit.command.clone import clone
from os.path import dirname, join, exists
from shutil import rmtree
class Test(unittest.TestCase):
def setUp(self):
self.tmpdir = join(dirname(__file__), 'tmp')
def tearDown(self):
if exists(self.tmpdir):
rmtree(self.tmpdir)
def testZombieDetection(self):
# This repository has a zombie copy of a file that was actually
# moved to Attic.
cvsroot = join(dirname(__file__), 'data', 'zombie')
# TODO: Discard command output to keep the test output clean.
self.assertEquals(clone().eval('--quiet-git', cvsroot, self.tmpdir), 0)
# TODO: Verify that the correct file was picked and the zombie
# got ignored.
if __name__ == '__main__':
unittest.main()
|
8ae44c3645eb6ec0bc0063299a193c14280430c7 | tests/capstone/policy/test_greedy.py | tests/capstone/policy/test_greedy.py | import unittest
from capstone.policy import GreedyPolicy
from capstone.util import play_match
class FakeEnv(object):
def __init__(self):
self._actions = []
def cur_state(self):
return 'FakeState'
def actions(self, state):
return self._actions
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
self.env = FakeEnv()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
| import unittest
from capstone.policy import GreedyPolicy
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
| Remove FakeEnv from Greedy test suite | Remove FakeEnv from Greedy test suite
| Python | mit | davidrobles/mlnd-capstone-code | import unittest
from capstone.policy import GreedyPolicy
from capstone.util import play_match
class FakeEnv(object):
def __init__(self):
self._actions = []
def cur_state(self):
return 'FakeState'
def actions(self, state):
return self._actions
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
self.env = FakeEnv()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
Remove FakeEnv from Greedy test suite | import unittest
from capstone.policy import GreedyPolicy
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
| <commit_before>import unittest
from capstone.policy import GreedyPolicy
from capstone.util import play_match
class FakeEnv(object):
def __init__(self):
self._actions = []
def cur_state(self):
return 'FakeState'
def actions(self, state):
return self._actions
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
self.env = FakeEnv()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
<commit_msg>Remove FakeEnv from Greedy test suite<commit_after> | import unittest
from capstone.policy import GreedyPolicy
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
| import unittest
from capstone.policy import GreedyPolicy
from capstone.util import play_match
class FakeEnv(object):
def __init__(self):
self._actions = []
def cur_state(self):
return 'FakeState'
def actions(self, state):
return self._actions
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
self.env = FakeEnv()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
Remove FakeEnv from Greedy test suiteimport unittest
from capstone.policy import GreedyPolicy
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
| <commit_before>import unittest
from capstone.policy import GreedyPolicy
from capstone.util import play_match
class FakeEnv(object):
def __init__(self):
self._actions = []
def cur_state(self):
return 'FakeState'
def actions(self, state):
return self._actions
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
self.env = FakeEnv()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
<commit_msg>Remove FakeEnv from Greedy test suite<commit_after>import unittest
from capstone.policy import GreedyPolicy
class TestGreedy(unittest.TestCase):
def setUp(self):
self.policy = GreedyPolicy()
def test_max_action(self):
state = 1
actions = [1, 5, 8]
fake_qf = {
(state, 1): 5,
(state, 5): 33,
(state, 8): 23,
}
action = self.policy.action(fake_qf, state, actions)
self.assertEqual(action, 5)
def test_raises_value_error_if_no_actions_available(self):
state = 1
actions = []
with self.assertRaises(ValueError):
self.policy.action({}, state, actions)
|
02af21600824ee1f836e89e825cc94fd1d949628 | resolwe/flow/migrations/0046_purge_data_dependencies.py | resolwe/flow/migrations/0046_purge_data_dependencies.py | # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(
child=data.id, kind=DataDependency.KIND_IO
):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
| # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(child=data.id, kind="io"):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
| Fix migration to use actual value instead of non-existing attribute | Fix migration to use actual value instead of non-existing attribute
| Python | apache-2.0 | genialis/resolwe,genialis/resolwe | # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(
child=data.id, kind=DataDependency.KIND_IO
):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
Fix migration to use actual value instead of non-existing attribute | # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(child=data.id, kind="io"):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
| <commit_before># Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(
child=data.id, kind=DataDependency.KIND_IO
):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
<commit_msg>Fix migration to use actual value instead of non-existing attribute<commit_after> | # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(child=data.id, kind="io"):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
| # Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(
child=data.id, kind=DataDependency.KIND_IO
):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
Fix migration to use actual value instead of non-existing attribute# Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(child=data.id, kind="io"):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
| <commit_before># Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(
child=data.id, kind=DataDependency.KIND_IO
):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
<commit_msg>Fix migration to use actual value instead of non-existing attribute<commit_after># Generated by Django 2.2.15 on 2020-09-21 11:38
from django.db import migrations
from resolwe.flow.utils import iterate_fields
def purge_data_dependencies(apps, schema_editor):
Data = apps.get_model("flow", "Data")
DataDependency = apps.get_model("flow", "DataDependency")
for data in Data.objects.iterator():
parent_pks = []
for field_schema, fields in iterate_fields(
data.input, data.process.input_schema
):
name = field_schema["name"]
value = fields[name]
if field_schema.get("type", "").startswith("data:"):
parent_pks.append(value)
elif field_schema.get("type", "").startswith("list:data:"):
parent_pks.extend(value)
parent_pks = [
pk if Data.objects.filter(pk=pk).exists() else None for pk in parent_pks
]
for dependency in DataDependency.objects.filter(child=data.id, kind="io"):
parent_pk = dependency.parent.pk if dependency.parent else None
if parent_pk in parent_pks:
parent_pks.remove(parent_pk)
else:
dependency.delete()
class Migration(migrations.Migration):
dependencies = [
("flow", "0045_unreferenced_storages"),
]
operations = [
migrations.RunPython(purge_data_dependencies),
]
|
9934fefe478bfa99bc8998ea5021700696160444 | sale_payment_method_automatic_workflow/__openerp__.py | sale_payment_method_automatic_workflow/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
| Correct author list, add OCA | Correct author list, add OCA
| Python | agpl-3.0 | Endika/e-commerce,BT-fgarbely/e-commerce,cloud9UG/e-commerce,JayVora-SerpentCS/e-commerce,BT-ojossen/e-commerce,Antiun/e-commerce,brain-tec/e-commerce,raycarnes/e-commerce,BT-ojossen/e-commerce,raycarnes/e-commerce,vauxoo-dev/e-commerce,damdam-s/e-commerce,Antiun/e-commerce,jt-xx/e-commerce,brain-tec/e-commerce,JayVora-SerpentCS/e-commerce,charbeljc/e-commerce,fevxie/e-commerce,gurneyalex/e-commerce,BT-jmichaud/e-commerce,vauxoo-dev/e-commerce,Endika/e-commerce,jt-xx/e-commerce | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
Correct author list, add OCA | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
<commit_msg>Correct author list, add OCA<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
Correct author list, add OCA# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': ['Camptocamp', 'Akretion'],
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
<commit_msg>Correct author list, add OCA<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier
# Copyright 2015 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{'name': 'Sale Payment Method - Automatic Worflow (link module)',
'version': '1.0',
'author': 'Camptocamp,Akretion,Odoo Community Association (OCA)',
'license': 'AGPL-3',
'category': 'Generic Modules/Others',
'depends': ['sale_payment_method',
'sale_automatic_workflow'],
'website': 'http://www.camptocamp.com',
'data': ['view/sale_order_view.xml',
'view/payment_method_view.xml',
],
'test': [],
'installable': True,
'auto_install': True,
}
|
0d7f93a787dcf723d79e9122702833c4942f09cc | photo/qt/image.py | photo/qt/image.py | """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
if self.item.orientation:
rm = QtGui.QMatrix()
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = rm.rotate(int(m.group(1)))
return pixmap.transformed(rm)
else:
return pixmap
| """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
rm = None
try:
rm = self.item.rotmatrix
except AttributeError:
if self.item.orientation:
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = QtGui.QMatrix().rotate(int(m.group(1)))
self.item.rotmatrix = rm
if rm:
return pixmap.transformed(rm)
else:
return pixmap
| Store the rotation matrix corresponding to the orientation in the item. | Store the rotation matrix corresponding to the orientation in the item.
| Python | apache-2.0 | RKrahl/photo-tools | """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
if self.item.orientation:
rm = QtGui.QMatrix()
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = rm.rotate(int(m.group(1)))
return pixmap.transformed(rm)
else:
return pixmap
Store the rotation matrix corresponding to the orientation in the item. | """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
rm = None
try:
rm = self.item.rotmatrix
except AttributeError:
if self.item.orientation:
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = QtGui.QMatrix().rotate(int(m.group(1)))
self.item.rotmatrix = rm
if rm:
return pixmap.transformed(rm)
else:
return pixmap
| <commit_before>"""Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
if self.item.orientation:
rm = QtGui.QMatrix()
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = rm.rotate(int(m.group(1)))
return pixmap.transformed(rm)
else:
return pixmap
<commit_msg>Store the rotation matrix corresponding to the orientation in the item.<commit_after> | """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
rm = None
try:
rm = self.item.rotmatrix
except AttributeError:
if self.item.orientation:
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = QtGui.QMatrix().rotate(int(m.group(1)))
self.item.rotmatrix = rm
if rm:
return pixmap.transformed(rm)
else:
return pixmap
| """Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
if self.item.orientation:
rm = QtGui.QMatrix()
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = rm.rotate(int(m.group(1)))
return pixmap.transformed(rm)
else:
return pixmap
Store the rotation matrix corresponding to the orientation in the item."""Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
rm = None
try:
rm = self.item.rotmatrix
except AttributeError:
if self.item.orientation:
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = QtGui.QMatrix().rotate(int(m.group(1)))
self.item.rotmatrix = rm
if rm:
return pixmap.transformed(rm)
else:
return pixmap
| <commit_before>"""Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
if self.item.orientation:
rm = QtGui.QMatrix()
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = rm.rotate(int(m.group(1)))
return pixmap.transformed(rm)
else:
return pixmap
<commit_msg>Store the rotation matrix corresponding to the orientation in the item.<commit_after>"""Provide the class Image corresponding to an IdxItem.
"""
import os.path
import re
from PySide import QtGui
class ImageNotFoundError(Exception):
pass
class Image(object):
def __init__(self, basedir, item):
self.item = item
self.fileName = os.path.join(basedir, item.filename)
self.name = item.name or os.path.basename(self.fileName)
def getPixmap(self):
image = QtGui.QImage(self.fileName)
if image.isNull():
raise ImageNotFoundError("Cannot load %s." % self.fileName)
pixmap = QtGui.QPixmap.fromImage(image)
rm = None
try:
rm = self.item.rotmatrix
except AttributeError:
if self.item.orientation:
m = re.match(r"Rotate (\d+) CW", self.item.orientation)
if m:
rm = QtGui.QMatrix().rotate(int(m.group(1)))
self.item.rotmatrix = rm
if rm:
return pixmap.transformed(rm)
else:
return pixmap
|
1cf7b11cdb12a135f2dfa99d7e625eb160b0d7c2 | apps/orders/models.py | apps/orders/models.py | from django.db import models
# Create your models here.
| from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
| Create Order and OrderItem Models | Create Order and OrderItem Models
| Python | mit | samitnuk/online_shop,samitnuk/online_shop,samitnuk/online_shop | from django.db import models
# Create your models here.
Create Order and OrderItem Models | from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Create Order and OrderItem Models<commit_after> | from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
| from django.db import models
# Create your models here.
Create Order and OrderItem Modelsfrom django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
| <commit_before>from django.db import models
# Create your models here.
<commit_msg>Create Order and OrderItem Models<commit_after>from django.db import models
from ..shop.models import Product
class Order(models.Model):
first_name = models.CharField(verbose_name="Ім,я", max_length=50)
last_name = models.CharField(verbose_name="Прізвище", max_length=50)
email = models.EmailField(verbose_name="Email")
address = models.CharField(verbose_name="Адреса", max_length=250)
postal_code = models.CharField(verbose_name="Поштовий код", max_length=20)
city = models.CharField(verbose_name="Місто", max_length=100)
created = models.DateTimeField(verbose_name="Створене", auto_now_add=True)
updated = models.DateTimeField(verbose_name="Оновлене", auto_now=True)
paid = models.BooleanField(verbose_name="Оплачене", default=False)
class Meta:
ordering = ('-created', )
verbose_name = "Замовлення"
verbose_name_plural = "Замовлення"
def __str__(self):
return "Замовлення: {}".format(self.id)
def get_total_cost(self):
return sum(item.get_cost() for item in self.items.all())
class OrderItem(models.Model):
order = models.ForeignKey(Order, related_name="items")
product = models.ForeignKey(Product, related_name="order_items")
price = models.DecimalField(verbose_name="Ціна", max_digits=10,
decimal_places=2)
quantity = models.PositiveIntegerField(verbose_name="К-сть", default=1)
def __str__(self):
return '{}'.format(self.id)
def get_cost(self):
return self.price * self.quantity
|
e5c5d7fae40eee638175c180c9cc7317d4bfe4b3 | scripts/migration/migrate_date_modified_for_existing_nodes.py | scripts/migration/migrate_date_modified_for_existing_nodes.py | """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
node.save()
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
| """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
errored_nodes = []
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
try:
node.save()
except KeyError as error: # Workaround for node whose files were not migrated long ago
logger.error('Could not migrate node due to error')
logger.exception(error)
errored_nodes.append(node)
else:
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if errored_nodes:
logger.error('{} errored'.format(len(errored_nodes)))
logger.error('\n'.join([each._id for each in errored_nodes]))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
| Fix migration; work around node whose files were unmigrated | Fix migration; work around node whose files were unmigrated
[skip ci]
| Python | apache-2.0 | cwisecarver/osf.io,cslzchen/osf.io,baylee-d/osf.io,crcresearch/osf.io,TomHeatwole/osf.io,kwierman/osf.io,KAsante95/osf.io,acshi/osf.io,zachjanicki/osf.io,mluo613/osf.io,kwierman/osf.io,chennan47/osf.io,RomanZWang/osf.io,KAsante95/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,mluke93/osf.io,emetsger/osf.io,amyshi188/osf.io,hmoco/osf.io,abought/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,erinspace/osf.io,saradbowman/osf.io,sloria/osf.io,rdhyee/osf.io,TomHeatwole/osf.io,emetsger/osf.io,abought/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,acshi/osf.io,jnayak1/osf.io,mluo613/osf.io,brandonPurvis/osf.io,mattclark/osf.io,zachjanicki/osf.io,emetsger/osf.io,TomHeatwole/osf.io,chennan47/osf.io,caseyrollins/osf.io,zachjanicki/osf.io,Ghalko/osf.io,jnayak1/osf.io,RomanZWang/osf.io,KAsante95/osf.io,doublebits/osf.io,chrisseto/osf.io,cwisecarver/osf.io,cslzchen/osf.io,sloria/osf.io,KAsante95/osf.io,leb2dg/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,KAsante95/osf.io,billyhunt/osf.io,caneruguz/osf.io,pattisdr/osf.io,adlius/osf.io,chrisseto/osf.io,TomBaxter/osf.io,samchrisinger/osf.io,adlius/osf.io,rdhyee/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,icereval/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,mattclark/osf.io,acshi/osf.io,leb2dg/osf.io,abought/osf.io,crcresearch/osf.io,asanfilippo7/osf.io,mluke93/osf.io,amyshi188/osf.io,amyshi188/osf.io,pattisdr/osf.io,felliott/osf.io,alexschiller/osf.io,GageGaskins/osf.io,alexschiller/osf.io,caseyrollins/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,SSJohns/osf.io,mfraezz/osf.io,zamattiac/osf.io,abought/osf.io,samchrisinger/osf.io,caneruguz/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,doublebits/osf.io,brandonPurvis/osf.io,alexschiller/osf.io,TomBaxter/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,wearpants/
osf.io,brandonPurvis/osf.io,kch8qx/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,DanielSBrown/osf.io,icereval/osf.io,saradbowman/osf.io,wearpants/osf.io,brandonPurvis/osf.io,hmoco/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,wearpants/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,sloria/osf.io,cwisecarver/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,samchrisinger/osf.io,mfraezz/osf.io,RomanZWang/osf.io,Ghalko/osf.io,GageGaskins/osf.io,hmoco/osf.io,chennan47/osf.io,baylee-d/osf.io,leb2dg/osf.io,billyhunt/osf.io,zamattiac/osf.io,jnayak1/osf.io,cwisecarver/osf.io,erinspace/osf.io,SSJohns/osf.io,Ghalko/osf.io,chrisseto/osf.io,pattisdr/osf.io,adlius/osf.io,monikagrabowska/osf.io,binoculars/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,kch8qx/osf.io,zamattiac/osf.io,erinspace/osf.io,Ghalko/osf.io,kwierman/osf.io,mluke93/osf.io,binoculars/osf.io,RomanZWang/osf.io,icereval/osf.io,adlius/osf.io,felliott/osf.io,aaxelb/osf.io,alexschiller/osf.io,jnayak1/osf.io,binoculars/osf.io,Nesiehr/osf.io,rdhyee/osf.io,Nesiehr/osf.io,SSJohns/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,acshi/osf.io,doublebits/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,DanielSBrown/osf.io,mluke93/osf.io,mluo613/osf.io,GageGaskins/osf.io,emetsger/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,billyhunt/osf.io,laurenrevere/osf.io,mluo613/osf.io,alexschiller/osf.io,billyhunt/osf.io,brianjgeiger/osf.io,felliott/osf.io,kch8qx/osf.io,SSJohns/osf.io,kwierman/osf.io,zachjanicki/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,doublebits/osf.io,aaxelb/osf.io,TomBaxter/osf.io,caneruguz/osf.io,felliott/osf.io,cslzchen/osf.io | """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
node.save()
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
Fix migration; work around node whose files were unmigrated
[skip ci] | """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
errored_nodes = []
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
try:
node.save()
except KeyError as error: # Workaround for node whose files were not migrated long ago
logger.error('Could not migrate node due to error')
logger.exception(error)
errored_nodes.append(node)
else:
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if errored_nodes:
logger.error('{} errored'.format(len(errored_nodes)))
logger.error('\n'.join([each._id for each in errored_nodes]))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
| <commit_before>"""
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
node.save()
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
<commit_msg>Fix migration; work around node whose files were unmigrated
[skip ci]<commit_after> | """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
errored_nodes = []
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
try:
node.save()
except KeyError as error: # Workaround for node whose files were not migrated long ago
logger.error('Could not migrate node due to error')
logger.exception(error)
errored_nodes.append(node)
else:
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if errored_nodes:
logger.error('{} errored'.format(len(errored_nodes)))
logger.error('\n'.join([each._id for each in errored_nodes]))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
| """
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
node.save()
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
Fix migration; work around node whose files were unmigrated
[skip ci]"""
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
errored_nodes = []
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
try:
node.save()
except KeyError as error: # Workaround for node whose files were not migrated long ago
logger.error('Could not migrate node due to error')
logger.exception(error)
errored_nodes.append(node)
else:
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if errored_nodes:
logger.error('{} errored'.format(len(errored_nodes)))
logger.error('\n'.join([each._id for each in errored_nodes]))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
| <commit_before>"""
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
node.save()
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
<commit_msg>Fix migration; work around node whose files were unmigrated
[skip ci]<commit_after>"""
This will add a date_modified field to all nodes. Date_modified will be equivalent to the date of the last log.
"""
import sys
import logging
from modularodm import Q
from website.app import init_app
from website import models
from scripts import utils as script_utils
from framework.transactions.context import TokuTransaction
logger = logging.getLogger(__name__)
def date_updated(node):
"""
The most recent datetime when this node was modified, based on
the logs.
"""
try:
return node.logs[-1].date
except IndexError:
return node.date_created
def main(dry=True):
init_app(routes=False)
nodes = models.Node.find(Q('date_modified', 'eq', None))
node_count = nodes.count()
count = 0
errored_nodes = []
for node in nodes:
count += 1
with TokuTransaction():
node.date_modified = date_updated(node)
if not dry:
try:
node.save()
except KeyError as error: # Workaround for node whose files were not migrated long ago
logger.error('Could not migrate node due to error')
logger.exception(error)
errored_nodes.append(node)
else:
logger.info('{}/{} Node {} "date_modified" added'.format(count, node_count, node._id))
if errored_nodes:
logger.error('{} errored'.format(len(errored_nodes)))
logger.error('\n'.join([each._id for each in errored_nodes]))
if __name__ == '__main__':
dry_run = 'dry' in sys.argv
if not dry_run:
script_utils.add_file_logger(logger, __file__)
main(dry=dry_run)
|
348d69c1491b7065dff06f6aebba105d72bbfd0d | metal/mmtl/test_mmtl.py | metal/mmtl/test_mmtl.py | import unittest
from nose.tools import nottest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
| import unittest
from nose.tools import nottest
from metal.mmtl.bert_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
| Fix BERT -> bert in bert_tasks | Fix BERT -> bert in bert_tasks
| Python | apache-2.0 | HazyResearch/metal,HazyResearch/metal | import unittest
from nose.tools import nottest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
Fix BERT -> bert in bert_tasks | import unittest
from nose.tools import nottest
from metal.mmtl.bert_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from nose.tools import nottest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix BERT -> bert in bert_tasks<commit_after> | import unittest
from nose.tools import nottest
from metal.mmtl.bert_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
| import unittest
from nose.tools import nottest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
Fix BERT -> bert in bert_tasksimport unittest
from nose.tools import nottest
from metal.mmtl.bert_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from nose.tools import nottest
from metal.mmtl.BERT_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
<commit_msg>Fix BERT -> bert in bert_tasks<commit_after>import unittest
from nose.tools import nottest
from metal.mmtl.bert_tasks import create_tasks
from metal.mmtl.metal_model import MetalModel
from metal.mmtl.trainer import MultitaskTrainer
@nottest
class MMTLTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
task_names = [
"COLA",
"SST2",
"MNLI",
"RTE",
"WNLI",
"QQP",
"MRPC",
"STSB",
"QNLI",
]
cls.tasks = create_tasks(
task_names, max_datapoints=100, max_len=200, dl_kwargs={"batch_size": 8}
)
def test_mmtl_training(self):
model = MetalModel(self.tasks, verbose=False)
trainer = MultitaskTrainer(verbose=False)
trainer.train_model(model, self.tasks, n_epochs=1)
if __name__ == "__main__":
unittest.main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.