commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b1c67f1846d03f20bf813b9a9940c07a0806a3ee | threaded_messages/search_indexes.py | threaded_messages/search_indexes.py | from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.RealTimeSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
| from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
| Remove RealTimeSearchIndex, gone in Haystack 2.something | Remove RealTimeSearchIndex, gone in Haystack 2.something
| Python | mit | siovene/django-threaded-messages,siovene/django-threaded-messages,siovene/django-threaded-messages | from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.RealTimeSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
Remove RealTimeSearchIndex, gone in Haystack 2.something | from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
| <commit_before>from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.RealTimeSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
<commit_msg>Remove RealTimeSearchIndex, gone in Haystack 2.something<commit_after> | from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
| from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.RealTimeSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
Remove RealTimeSearchIndex, gone in Haystack 2.somethingfrom haystack import indexes
from .models import Thread
class ThreadIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
| <commit_before>from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.RealTimeSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
<commit_msg>Remove RealTimeSearchIndex, gone in Haystack 2.something<commit_after>from haystack import indexes
from .models import Thread
class ThreadIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
participants = indexes.MultiValueField()
last_message = indexes.DateTimeField(model_attr='latest_msg__sent_at')
def index_queryset(self):
return Thread.objects.all()
def prepare_participants(self, object):
return [p.user.pk for p in object.participants.all()]
def get_model(self):
return Thread
|
c0daf6e2c549e2857f8acfc69ae97635705a5342 | docs/conf.py | docs/conf.py | import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| Define date in docs dynamically | Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>
| Python | bsd-3-clause | pymanopt/pymanopt,pymanopt/pymanopt | import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com> | import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| <commit_before>import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
<commit_msg>Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after> | import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com>import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
| <commit_before>import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = "2016-2021, {:s}".format(author)
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "www.pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
<commit_msg>Define date in docs dynamically
Signed-off-by: Niklas Koep <342d5290239d9c5264c8f98185afedb99596601a@gmail.com><commit_after>import datetime
import pymanopt
# Package information
project = "Pymanopt"
author = "Jamie Townsend, Niklas Koep, Sebastian Weichwald"
copyright = f"2016-{datetime.date.today().year}, {author}"
release = version = pymanopt.__version__
# Build settings
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode"
]
source_suffix = ".rst"
master_doc = "index"
language = None
exclude_patterns = ["build", "*.egg*"]
# Output options
html_theme = "sphinx_rtd_theme"
html_show_sphinx = False
html_baseurl = "pymanopt.org"
htmlhelp_basename = "pymanoptdoc"
html_last_updated_fmt = ""
# autodoc
autodoc_default_options = {
"member-order": "bysource",
"members": True,
"undoc-members": True,
"show-inheritance": True
}
|
9616afb9e8c7a5a599096b588cd71a714e001e2b | dduplicated/fileManager.py | dduplicated/fileManager.py | import os
from threading import Thread
def _delete(path):
os.remove(path)
def _link(src, path):
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path)).start()
deleted_files.append(path)
if link:
Thread(target=_link, args=(src, path)).start()
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
| import os
from threading import Thread
def _delete(path: str, src: str, link: bool):
os.remove(path)
if link:
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path, src, link)).start()
deleted_files.append(path)
if link:
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
| Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems. | Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>
| Python | mit | messiasthi/dduplicated-cli | import os
from threading import Thread
def _delete(path):
os.remove(path)
def _link(src, path):
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path)).start()
deleted_files.append(path)
if link:
Thread(target=_link, args=(src, path)).start()
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com> | import os
from threading import Thread
def _delete(path: str, src: str, link: bool):
os.remove(path)
if link:
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path, src, link)).start()
deleted_files.append(path)
if link:
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
| <commit_before>import os
from threading import Thread
def _delete(path):
os.remove(path)
def _link(src, path):
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path)).start()
deleted_files.append(path)
if link:
Thread(target=_link, args=(src, path)).start()
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
<commit_msg>Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com><commit_after> | import os
from threading import Thread
def _delete(path: str, src: str, link: bool):
os.remove(path)
if link:
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path, src, link)).start()
deleted_files.append(path)
if link:
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
| import os
from threading import Thread
def _delete(path):
os.remove(path)
def _link(src, path):
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path)).start()
deleted_files.append(path)
if link:
Thread(target=_link, args=(src, path)).start()
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com>import os
from threading import Thread
def _delete(path: str, src: str, link: bool):
os.remove(path)
if link:
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path, src, link)).start()
deleted_files.append(path)
if link:
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
| <commit_before>import os
from threading import Thread
def _delete(path):
os.remove(path)
def _link(src, path):
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path)).start()
deleted_files.append(path)
if link:
Thread(target=_link, args=(src, path)).start()
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
<commit_msg>Fix in link action. Remove the `_link` method and add action to `_delete`, this fix concurrency problems.
Signed-off-by: messiasthi <8562fc1efba9a3c99753c749fdfb1b6932b70fbf@gmail.com><commit_after>import os
from threading import Thread
def _delete(path: str, src: str, link: bool):
os.remove(path)
if link:
os.symlink(src, path)
def manager_files(paths, link):
# The first file is preserved to not delete all files in directories.
first = True
src = ""
deleted_files = []
linked_files = []
errors = []
for path in paths:
if os.path.isfile(path):
if first:
first = False
src = path
else:
Thread(target=_delete, args=(path, src, link)).start()
deleted_files.append(path)
if link:
linked_files.append(path)
else:
errors.append("Not identified by file: \"{}\"".format(path))
return {"preserved": src, "linked_files": linked_files, "deleted_files": deleted_files, "errors": errors}
# Try The Voight-Kampff if you not recognize if is a replicant or not, all is suspect
def manager(duplicates, create_link=False):
if len(duplicates) == 0:
return None
processed_files = []
for files_by_hash in duplicates.values():
processed_files.append(manager_files(files_by_hash, create_link))
return processed_files
def delete(duplicates):
return manager(duplicates)
def link(duplicates):
return manager(duplicates, True)
|
64147133f2140777c5b3aafcdf7510d816dd6462 | core/data/DataTransformer.py | core/data/DataTransformer.py | """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
return reslicer.GetOutput()
| """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
# reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
reslicer.Update()
return reslicer.GetOutput()
| Fix for data transformer giving no output. | Fix for data transformer giving no output.
| Python | mit | berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop | """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
return reslicer.GetOutput()
Fix for data transformer giving no output. | """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
# reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
reslicer.Update()
return reslicer.GetOutput()
| <commit_before>"""
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
return reslicer.GetOutput()
<commit_msg>Fix for data transformer giving no output.<commit_after> | """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
# reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
reslicer.Update()
return reslicer.GetOutput()
| """
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
return reslicer.GetOutput()
Fix for data transformer giving no output."""
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
# reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
reslicer.Update()
return reslicer.GetOutput()
| <commit_before>"""
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
return reslicer.GetOutput()
<commit_msg>Fix for data transformer giving no output.<commit_after>"""
DataTransformer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageReslice
class DataTransformer(object):
"""DataTransformer is a class that can transform a given dataset"""
def __init__(self):
super(DataTransformer, self).__init__()
def TransformImageData(self, imageData, transform):
"""
:type imageData: vtkImageData
:type transform: vtkTransform
"""
reslicer = vtkImageReslice()
reslicer.SetInterpolationModeToCubic()
# reslicer.SetAutoCropOutput(1) # Not sure if this is what we want
reslicer.SetInputData(imageData)
reslicer.SetResliceTransform(transform.GetInverse())
reslicer.Update()
return reslicer.GetOutput()
|
d708fa46b135fb1104d827ec4e64412f0028d94e | pyleus/compat.py | pyleus/compat.py | import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO
_ = StringIO
| import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO # pyflakes
_ = StringIO # pyflakes
| Add comments about pyflakes appeasement | Add comments about pyflakes appeasement
| Python | apache-2.0 | poros/pyleus,patricklucas/pyleus,mzbyszynski/pyleus,Yelp/pyleus,imcom/pyleus,dapuck/pyleus,stallman-cui/pyleus,stallman-cui/pyleus,mzbyszynski/pyleus,imcom/pyleus,poros/pyleus,ecanzonieri/pyleus,Yelp/pyleus,imcom/pyleus,jirafe/pyleus,jirafe/pyleus,ecanzonieri/pyleus,patricklucas/pyleus,dapuck/pyleus | import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO
_ = StringIO
Add comments about pyflakes appeasement | import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO # pyflakes
_ = StringIO # pyflakes
| <commit_before>import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO
_ = StringIO
<commit_msg>Add comments about pyflakes appeasement<commit_after> | import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO # pyflakes
_ = StringIO # pyflakes
| import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO
_ = StringIO
Add comments about pyflakes appeasementimport sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO # pyflakes
_ = StringIO # pyflakes
| <commit_before>import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO
_ = StringIO
<commit_msg>Add comments about pyflakes appeasement<commit_after>import sys
if sys.version_info[0] < 3:
from cStringIO import StringIO
BytesIO = StringIO
else:
from io import BytesIO
from io import StringIO
_ = BytesIO # pyflakes
_ = StringIO # pyflakes
|
f21ebbaabb5ce38432961b7786b78ad4d23f3259 | django_mercadopago/urls.py | django_mercadopago/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification, name='notifications'),
]
| Add a view to the notifications name (for reversing) | Add a view to the notifications name (for reversing)
| Python | isc | asermax/django-mercadopago | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification),
]
Add a view to the notifications name (for reversing) | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification, name='notifications'),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification),
]
<commit_msg>Add a view to the notifications name (for reversing)<commit_after> | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification, name='notifications'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification),
]
Add a view to the notifications name (for reversing)from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification, name='notifications'),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification),
]
<commit_msg>Add a view to the notifications name (for reversing)<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^notifications$', views.create_notification, name='notifications'),
]
|
748e713fdb2f3126f463651562e1f938a7ce1511 | src/dcm/agent/scripts/common-linux/general_cleanup.py | src/dcm/agent/scripts/common-linux/general_cleanup.py | import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
| import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
files_to_clean = ["/var/lib/waagent/provisioned"]
for f in files_to_clean:
try:
os.remove(f)
except OSError as osEx:
# in many cases the file will not be there
if osEx.errno != 2:
sys.stderr.write(str(osEx))
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
| Clean Azure images before snapshot | Clean Azure images before snapshot
For waagent to provision the following file must not exist:
/var/lib/waagent/provisioned
| Python | apache-2.0 | buzztroll/unix-agent,JPWKU/unix-agent,buzztroll/unix-agent,buzztroll/unix-agent,buzztroll/unix-agent,JPWKU/unix-agent,enStratus/unix-agent,enStratus/unix-agent,JPWKU/unix-agent,enStratus/unix-agent,JPWKU/unix-agent,enStratus/unix-agent | import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
Clean Azure images before snapshot
For waagent to provision the following file must not exist:
/var/lib/waagent/provisioned | import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
files_to_clean = ["/var/lib/waagent/provisioned"]
for f in files_to_clean:
try:
os.remove(f)
except OSError as osEx:
# in many cases the file will not be there
if osEx.errno != 2:
sys.stderr.write(str(osEx))
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
| <commit_before>import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
<commit_msg>Clean Azure images before snapshot
For waagent to provision the following file must not exist:
/var/lib/waagent/provisioned<commit_after> | import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
files_to_clean = ["/var/lib/waagent/provisioned"]
for f in files_to_clean:
try:
os.remove(f)
except OSError as osEx:
# in many cases the file will not be there
if osEx.errno != 2:
sys.stderr.write(str(osEx))
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
| import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
Clean Azure images before snapshot
For waagent to provision the following file must not exist:
/var/lib/waagent/provisionedimport os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
files_to_clean = ["/var/lib/waagent/provisioned"]
for f in files_to_clean:
try:
os.remove(f)
except OSError as osEx:
# in many cases the file will not be there
if osEx.errno != 2:
sys.stderr.write(str(osEx))
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
| <commit_before>import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
<commit_msg>Clean Azure images before snapshot
For waagent to provision the following file must not exist:
/var/lib/waagent/provisioned<commit_after>import os
import sys
import logging
_g_logger = logging.getLogger(__name__)
def main(bin_path, dcm_basedir, dbfile):
dirs_to_clean = [os.path.join(dcm_basedir, 'logs'),
os.path.join(dcm_basedir, 'secure')]
for clean_dir in dirs_to_clean:
for (dirpath, dirname, filenames) in os.walk(clean_dir):
for file in filenames:
if not os.path.join(dirpath, file) == dbfile:
cmd = '%s %s' % \
(os.path.join(bin_path, 'secureDelete'),
os.path.join(dirpath, file))
os.system(cmd)
files_to_clean = ["/var/lib/waagent/provisioned"]
for f in files_to_clean:
try:
os.remove(f)
except OSError as osEx:
# in many cases the file will not be there
if osEx.errno != 2:
sys.stderr.write(str(osEx))
if __name__ == "__main__":
try:
dcm_basedir = os.environ.get('DCM_BASEDIR')
dbfile = sys.argv[1]
except Exception as ex:
_g_logger.exception("general_cleanup failed: " + str(ex))
sys.exit(1)
bin_path = os.path.dirname(os.path.abspath(__file__))
main(bin_path, dcm_basedir, dbfile)
|
25cd9c428a467fce92dae98476515e43806bc20c | iati/core/codelists.py | iati/core/codelists.py | from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
| from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it
TODO: Define relevant tests and add error handling
"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
| Add codelist xml parsing TODO | Add codelist xml parsing TODO
| Python | mit | IATI/iati.core,IATI/iati.core | from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
Add codelist xml parsing TODO | from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it
TODO: Define relevant tests and add error handling
"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
| <commit_before>from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
<commit_msg>Add codelist xml parsing TODO<commit_after> | from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it
TODO: Define relevant tests and add error handling
"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
| from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
Add codelist xml parsing TODOfrom lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it
TODO: Define relevant tests and add error handling
"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
| <commit_before>from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
<commit_msg>Add codelist xml parsing TODO<commit_after>from lxml import etree
import iati.core.resources
class Codelist(object):
"""Representation of a Codelist as defined within the IATI SSOT"""
def __init__(self, name=None, path=None, xml=None):
def parse_from_xml(xml):
"""Parse a Codelist from the XML that defines it
TODO: Define relevant tests and add error handling
"""
tree = etree.fromstring(xml)
self.name = tree.attrib['name']
for codeEl in tree.findall('codelist-items/codelist-item'):
value = codeEl.find('code').text
name = codeEl.find('description/narrative').text
self.add_code(iati.core.codelists.Code(value, name))
self.codes = []
self.name = name
self.path = path
if xml:
parse_from_xml(xml)
def add_code(self, code):
"""Add a Code to the Codelist"""
if isinstance(code, Code):
self.codes.append(code)
class Code(object):
"""Representation of a Code contained within a Codelist"""
def __init__(self, value=None, name=None):
self.name = name
self.value = value
|
67bcacb60a9e24970345d5f6daf3ba3649677b5c | froide/campaign/listeners.py | froide/campaign/listeners.py | from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
| from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if '@' in reference:
parts = reference.split('@', 1)
else:
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
| Allow connecting froide_campaigns to campaign app | Allow connecting froide_campaigns to campaign app | Python | mit | fin/froide,fin/froide,fin/froide,fin/froide | from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
Allow connecting froide_campaigns to campaign app | from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if '@' in reference:
parts = reference.split('@', 1)
else:
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
| <commit_before>from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
<commit_msg>Allow connecting froide_campaigns to campaign app<commit_after> | from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if '@' in reference:
parts = reference.split('@', 1)
else:
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
| from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
Allow connecting froide_campaigns to campaign appfrom .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if '@' in reference:
parts = reference.split('@', 1)
else:
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
| <commit_before>from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
<commit_msg>Allow connecting froide_campaigns to campaign app<commit_after>from .utils import connect_foirequest
def connect_campaign(sender, **kwargs):
reference = kwargs.get('reference')
if not reference:
return
if '@' in reference:
parts = reference.split('@', 1)
else:
parts = reference.split(':', 1)
if len(parts) != 2:
return
namespace = parts[0]
connect_foirequest(sender, namespace)
|
ffd917c5ace8e815b185495aec17cf47b0a7648a | storage_service/administration/tests/test_languages.py | storage_service/administration/tests/test_languages.py | from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
super(TestLanguageSwitching, cls).setUpClass()
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
| from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
super(TestLanguageSwitching, cls).setUpClass()
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
| Fix integrity error reusing db in tests | Fix integrity error reusing db in tests
Base `setUpClass` needs to be called first so the transaction is initialized
before we mutate the data.
This solves a conflic raised when using `--reuse-db`.
| Python | agpl-3.0 | artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service | from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
super(TestLanguageSwitching, cls).setUpClass()
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
Fix integrity error reusing db in tests
Base `setUpClass` needs to be called first so the transaction is initialized
before we mutate the data.
This solves a conflic raised when using `--reuse-db`. | from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
super(TestLanguageSwitching, cls).setUpClass()
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
| <commit_before>from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
super(TestLanguageSwitching, cls).setUpClass()
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
<commit_msg>Fix integrity error reusing db in tests
Base `setUpClass` needs to be called first so the transaction is initialized
before we mutate the data.
This solves a conflic raised when using `--reuse-db`.<commit_after> | from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
super(TestLanguageSwitching, cls).setUpClass()
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
| from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
super(TestLanguageSwitching, cls).setUpClass()
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
Fix integrity error reusing db in tests
Base `setUpClass` needs to be called first so the transaction is initialized
before we mutate the data.
This solves a conflic raised when using `--reuse-db`.from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
super(TestLanguageSwitching, cls).setUpClass()
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
| <commit_before>from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
super(TestLanguageSwitching, cls).setUpClass()
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
<commit_msg>Fix integrity error reusing db in tests
Base `setUpClass` needs to be called first so the transaction is initialized
before we mutate the data.
This solves a conflic raised when using `--reuse-db`.<commit_after>from django.contrib.auth.models import User
from django.test import TestCase, override_settings
class TestLanguageSwitching(TestCase):
@classmethod
def setUpClass(cls):
super(TestLanguageSwitching, cls).setUpClass()
User.objects.create_user(
username="admin", password="admin", email="admin@example.com"
)
def setUp(self):
self.client.login(username="admin", password="admin")
def test_displays_language_form(self):
self.client.get("/administration/language/")
self.assertTemplateUsed("language_form.html")
@override_settings(LANGUAGE_CODE="es")
def test_selects_correct_language_on_form(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="es-es")
def test_falls_back_to_generic_language(self):
response = self.client.get("/administration/language/")
assert response.context["language_selection"] == "es"
@override_settings(LANGUAGE_CODE="en-us")
def test_switch_language(self):
response = self.client.post(
"/i18n/setlang/",
{"language": "fr", "next": "/administration/language/"},
follow=True,
)
assert response.context["language_selection"] == "fr"
|
cc86cb09854cc5656a99e209b27a4c9d9a407bb1 | turbinia/config/turbinia_config.py | turbinia/config/turbinia_config.py | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = 'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = None
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
| # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = u'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = u'turbinia-psq'
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
| Make turbinia-psq the default pubsub queue name | Make turbinia-psq the default pubsub queue name
| Python | apache-2.0 | google/turbinia,google/turbinia,google/turbinia,google/turbinia,google/turbinia | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = 'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = None
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
Make turbinia-psq the default pubsub queue name | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = u'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = u'turbinia-psq'
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
| <commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = 'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = None
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
<commit_msg>Make turbinia-psq the default pubsub queue name<commit_after> | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = u'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = u'turbinia-psq'
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
| # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = 'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = None
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
Make turbinia-psq the default pubsub queue name# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = u'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = u'turbinia-psq'
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
| <commit_before># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = 'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = None
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
<commit_msg>Make turbinia-psq the default pubsub queue name<commit_after># Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Dummy Turbinia config file."""
# Turbinia Config
# Valid values are 'PSQ' or 'Celery'
TASK_MANAGER = u'PSQ'
# Time between heartbeats in seconds
WORKER_HEARTBEAT = 600
# Timeout between heartbeats for Workers to be considered inactive
WORKER_TIMEOUT = 3600
# GCE configuration
PROJECT = None
ZONE = None
INSTANCE = None
DEVICE_NAME = None
SCRATCH_PATH = None
BUCKET_NAME = None
PSQ_TOPIC = u'turbinia-psq'
# Topic Turbinia will listen on for new Artifact events
PUBSUB_TOPIC = None
# Redis configuration
REDIS_HOST = None
REDIS_PORT = None
# Timesketch configuration
TIMESKETCH_HOST = None
TIMESKETCH_USER = None
TIMESKETCH_PASSWORD = None
|
138fd41960013b11ae1c49d46140b69c24b27abd | tests/stonemason/service/tileserver/test_tileserver.py | tests/stonemason/service/tileserver/test_tileserver.py | # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual('Hello World!', resp.data)
| # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
| Fix a compatible bug. Response data is binary in Python 3. | Fix: Fix a compatible bug. Response data is binary in Python 3.
| Python | mit | Kotaimen/stonemason,Kotaimen/stonemason | # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual('Hello World!', resp.data)
Fix: Fix a compatible bug. Response data is binary in Python 3. | # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
| <commit_before># -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual('Hello World!', resp.data)
<commit_msg>Fix: Fix a compatible bug. Response data is binary in Python 3.<commit_after> | # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
| # -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual('Hello World!', resp.data)
Fix: Fix a compatible bug. Response data is binary in Python 3.# -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
| <commit_before># -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual('Hello World!', resp.data)
<commit_msg>Fix: Fix a compatible bug. Response data is binary in Python 3.<commit_after># -*- encoding: utf-8 -*-
import os
import unittest
from stonemason.service.tileserver import AppBuilder
class TestExample(unittest.TestCase):
def setUp(self):
os.environ['EXAMPLE_APP_ENV'] = 'dev'
app = AppBuilder().build()
self.client = app.test_client()
def test_app(self):
resp = self.client.get('/')
self.assertEqual(b'Hello World!', resp.data)
|
a0444025e6d861dfef29d307435fa74f10362890 | src/hireme/server.py | src/hireme/server.py | # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory():
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
| # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory(*args, **kwargs):
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
| Allow arbitrary parameters to be passed | Allow arbitrary parameters to be passed
| Python | bsd-2-clause | cutoffthetop/hireme | # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory():
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
Allow arbitrary parameters to be passed | # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory(*args, **kwargs):
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
| <commit_before># -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory():
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
<commit_msg>Allow arbitrary parameters to be passed<commit_after> | # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory(*args, **kwargs):
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
| # -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory():
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
Allow arbitrary parameters to be passed# -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory(*args, **kwargs):
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
| <commit_before># -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory():
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
<commit_msg>Allow arbitrary parameters to be passed<commit_after># -*- coding: utf-8 -*-
import flask
from . import rendering
from . import task1, task2
def app_factory(*args, **kwargs):
"""Create a new Flask instance and configure the URL map."""
app = flask.Flask(import_name=__package__)
app.add_url_rule('/', 'index', rendering.render_index)
app.add_url_rule('/task1', 'task1', task1.solve, methods=['GET', 'POST'])
app.add_url_rule('/task2', 'task2', task2.solve, methods=['GET', 'POST'])
return app
def run_local(*args, **kwargs):
"""Run the app on a local development server with debugging enabled."""
app = app_factory()
app.debug = True
app.run()
|
be127957f35a4673c95a81884adf3484943af079 | future/tests/test_imports_urllib.py | future/tests/test_imports_urllib.py | import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
| from __future__ import absolute_import, print_function
import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
| Tweak to a noisy test module | Tweak to a noisy test module
| Python | mit | krischer/python-future,michaelpacer/python-future,krischer/python-future,michaelpacer/python-future,QuLogic/python-future,QuLogic/python-future,PythonCharmers/python-future,PythonCharmers/python-future | import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
Tweak to a noisy test module | from __future__ import absolute_import, print_function
import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
| <commit_before>import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
<commit_msg>Tweak to a noisy test module<commit_after> | from __future__ import absolute_import, print_function
import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
| import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
Tweak to a noisy test modulefrom __future__ import absolute_import, print_function
import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
| <commit_before>import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
<commit_msg>Tweak to a noisy test module<commit_after>from __future__ import absolute_import, print_function
import unittest
import sys
print([m for m in sys.modules if m.startswith('urllib')])
class MyTest(unittest.TestCase):
def test_urllib(self):
import urllib
print(urllib.__file__)
from future import standard_library
with standard_library.hooks():
import urllib.response
print(urllib.__file__)
print(urllib.response.__file__)
unittest.main()
|
cad499437969e6b1b23ab7d2639003d4ec6a86b1 | datasets/ccgois/transform.py | datasets/ccgois/transform.py | import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1],
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
print datasets[0]['resources']
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
| import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1]
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
| Fix the naming of the newly created resources | Fix the naming of the newly created resources
| Python | mit | nhsengland/publish-o-matic | import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1],
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
print datasets[0]['resources']
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
Fix the naming of the newly created resources | import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1]
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
| <commit_before>import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1],
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
print datasets[0]['resources']
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
<commit_msg>Fix the naming of the newly created resources<commit_after> | import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1]
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
| import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1],
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
print datasets[0]['resources']
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
Fix the naming of the newly created resourcesimport datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1]
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
| <commit_before>import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1],
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
print datasets[0]['resources']
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
<commit_msg>Fix the naming of the newly created resources<commit_after>import datetime
import json
import sys
import ffs
import re
from publish.lib.helpers import filename_for_resource, download_file
from publish.lib.upload import Uploader
def main(workspace):
DATA_DIR = ffs.Path(workspace)
datasets = json.load(open(DATA_DIR / 'ccgois_indicators.json'))
u = Uploader("ccgois")
for dataset in datasets:
resources = []
for resource in dataset['sources']:
resource['format'] = resource['filetype']
resource['name'] = resource['url'].split('/')[-1]
filename = filename_for_resource(resource)
path = DATA_DIR / filename
download_file(resource['url'], path)
print "Uploading to S3"
url = u.upload(path)
resource['url'] = url
resources.append(resource)
dataset['resources'] = resources
u.close()
json.dump(datasets, open(DATA_DIR / 'ccgois_indicators.json', 'w'))
|
3128c2be4cd44977638c81e22a24c956a273153a | allaccess/__init__.py | allaccess/__init__.py | """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.5.1'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
| """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.6.0dev'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
| Update master version to reflect dev status. | Update master version to reflect dev status.
| Python | bsd-2-clause | iXioN/django-all-access,vyscond/django-all-access,mlavin/django-all-access,iXioN/django-all-access,dpoirier/django-all-access,mlavin/django-all-access,dpoirier/django-all-access,vyscond/django-all-access | """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.5.1'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
Update master version to reflect dev status. | """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.6.0dev'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
| <commit_before>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.5.1'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
<commit_msg>Update master version to reflect dev status.<commit_after> | """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.6.0dev'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
| """
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.5.1'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
Update master version to reflect dev status."""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.6.0dev'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
| <commit_before>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.5.1'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
<commit_msg>Update master version to reflect dev status.<commit_after>"""
django-all-access is a reusable application for user registration and authentication
from OAuth 1.0 and OAuth 2.0 providers such as Twitter and Facebook.
"""
__version__ = '0.6.0dev'
import logging
class NullHandler(logging.Handler):
"No-op logging handler."
def emit(self, record):
pass
# Configure null handler to prevent "No handlers could be found..." errors
logging.getLogger('allaccess').addHandler(NullHandler())
|
3842fef4a2f291b64d83a3977946b07c86ac46d6 | build/identfilter.py | build/identfilter.py | #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
| #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
| Fix spacing in the filter script | Fix spacing in the filter script
Conflicting file loading directives in Vim screwed up the tab stops.
| Python | mit | criptych/graphene,criptych/graphene,ebassi/graphene,criptych/graphene,criptych/graphene,ebassi/graphene | #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
Fix spacing in the filter script
Conflicting file loading directives in Vim screwed up the tab stops. | #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
| <commit_before>#!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
<commit_msg>Fix spacing in the filter script
Conflicting file loading directives in Vim screwed up the tab stops.<commit_after> | #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
| #!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
Fix spacing in the filter script
Conflicting file loading directives in Vim screwed up the tab stops.#!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
| <commit_before>#!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
<commit_msg>Fix spacing in the filter script
Conflicting file loading directives in Vim screwed up the tab stops.<commit_after>#!/usr/bin/env python
import sys
import re
NUMBER_REGEX = re.compile(r'([0-9])([a-z])')
def to_camel_case(text):
# We only care about types that end with '_t'
if not text.endswith('_t'):
return text
res = []
for token in text[:-2].split('_'):
uc_token = token.title()
# We need to do this for types like graphene_point3d_t, which
# need to be transformed into GraphenePoint3D, not GraphenePoint3d
matches = NUMBER_REGEX.match(uc_token)
if matches and matches.group(2):
uc_token = ''.join([matches.group(1), matches.group(2).title])
res.append(uc_token)
return ''.join(res)
if __name__ == '__main__':
in_text = sys.stdin.read()
sys.stdout.write(to_camel_case(in_text))
|
f4cb832d61437ad6e871a1596393adc06ceafab9 | cookiecutter/utils.py | cookiecutter/utils.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
"""
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
| Add docstring. 4 spaces for consistency. | Add docstring. 4 spaces for consistency.
| Python | bsd-3-clause | lgp171188/cookiecutter,janusnic/cookiecutter,dajose/cookiecutter,agconti/cookiecutter,willingc/cookiecutter,letolab/cookiecutter,christabor/cookiecutter,cichm/cookiecutter,vincentbernat/cookiecutter,terryjbates/cookiecutter,atlassian/cookiecutter,foodszhang/cookiecutter,michaeljoseph/cookiecutter,cguardia/cookiecutter,alex/cookiecutter,lucius-feng/cookiecutter,lgp171188/cookiecutter,vincentbernat/cookiecutter,kkujawinski/cookiecutter,sp1rs/cookiecutter,atlassian/cookiecutter,hackebrot/cookiecutter,vintasoftware/cookiecutter,audreyr/cookiecutter,ionelmc/cookiecutter,cichm/cookiecutter,venumech/cookiecutter,nhomar/cookiecutter,nhomar/cookiecutter,Vauxoo/cookiecutter,ramiroluz/cookiecutter,Vauxoo/cookiecutter,hackebrot/cookiecutter,stevepiercy/cookiecutter,utek/cookiecutter,christabor/cookiecutter,ionelmc/cookiecutter,tylerdave/cookiecutter,drgarcia1986/cookiecutter,stevepiercy/cookiecutter,jhermann/cookiecutter,Springerle/cookiecutter,letolab/cookiecutter,jhermann/cookiecutter,0k/cookiecutter,ramiroluz/cookiecutter,terryjbates/cookiecutter,foodszhang/cookiecutter,sp1rs/cookiecutter,Springerle/cookiecutter,michaeljoseph/cookiecutter,takeflight/cookiecutter,0k/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,moi65/cookiecutter,lucius-feng/cookiecutter,cguardia/cookiecutter,audreyr/cookiecutter,alex/cookiecutter,utek/cookiecutter,benthomasson/cookiecutter,benthomasson/cookiecutter,moi65/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,takeflight/cookiecutter,janusnic/cookiecutter,vintasoftware/cookiecutter,tylerdave/cookiecutter,pjbull/cookiecutter,kkujawinski/cookiecutter,dajose/cookiecutter,agconti/cookiecutter,venumech/cookiecutter,drgarcia1986/cookiecutter | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
Add docstring. 4 spaces for consistency. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
"""
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
<commit_msg>Add docstring. 4 spaces for consistency.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
"""
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
Add docstring. 4 spaces for consistency.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
"""
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
<commit_msg>Add docstring. 4 spaces for consistency.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.utils
------------------
Helper functions used throughout Cookiecutter.
"""
import errno
import os
import sys
import contextlib
PY3 = sys.version > '3'
if PY3:
pass
else:
import codecs
def make_sure_path_exists(path):
"""
Ensures that a directory exists.
:param path: A directory path.
"""
try:
os.makedirs(path)
except OSError as exception:
if exception.errno != errno.EEXIST:
return False
return True
def unicode_open(filename, *args, **kwargs):
"""
Opens a file as usual on Python 3, and with UTF-8 encoding on Python 2.
:param filename: Name of file to open.
"""
kwargs['encoding'] = "utf-8"
if PY3:
return open(filename, *args, **kwargs)
return codecs.open(filename, *args, **kwargs)
@contextlib.contextmanager
def work_in(dirname=None):
"""
Context manager version of os.chdir. When exited, returns to the working
directory prior to entering.
"""
curdir = os.getcwd()
try:
if dirname is not None:
os.chdir(dirname)
yield
finally:
os.chdir(curdir)
|
20d02eef92d458dac890f1ab814ca146f2bd1853 | s3direct/urls.py | s3direct/urls.py | from django.conf.urls import patterns, url
from s3direct.views import get_upload_params
urlpatterns = patterns('',
url('^get_upload_params/',
get_upload_params, name='s3direct'),
)
| from django.conf.urls import url
from s3direct.views import get_upload_params
urlpatterns = [
url('^get_upload_params/', get_upload_params, name='s3direct')
]
| Update urlpatterns to use list of url() | Update urlpatterns to use list of url()
| Python | mit | AlexRiina/django-s3direct,yunojuno/django-s3-upload,yunojuno/django-s3-upload,bradleyg/django-s3direct,yunojuno/django-s3-upload,Artory/django-s3direct,Artory/django-s3direct,bradleyg/django-s3direct,Artory/django-s3direct,AlexRiina/django-s3direct,AlexRiina/django-s3direct,bradleyg/django-s3direct | from django.conf.urls import patterns, url
from s3direct.views import get_upload_params
urlpatterns = patterns('',
url('^get_upload_params/',
get_upload_params, name='s3direct'),
)
Update urlpatterns to use list of url() | from django.conf.urls import url
from s3direct.views import get_upload_params
urlpatterns = [
url('^get_upload_params/', get_upload_params, name='s3direct')
]
| <commit_before>from django.conf.urls import patterns, url
from s3direct.views import get_upload_params
urlpatterns = patterns('',
url('^get_upload_params/',
get_upload_params, name='s3direct'),
)
<commit_msg>Update urlpatterns to use list of url()<commit_after> | from django.conf.urls import url
from s3direct.views import get_upload_params
urlpatterns = [
url('^get_upload_params/', get_upload_params, name='s3direct')
]
| from django.conf.urls import patterns, url
from s3direct.views import get_upload_params
urlpatterns = patterns('',
url('^get_upload_params/',
get_upload_params, name='s3direct'),
)
Update urlpatterns to use list of url()from django.conf.urls import url
from s3direct.views import get_upload_params
urlpatterns = [
url('^get_upload_params/', get_upload_params, name='s3direct')
]
| <commit_before>from django.conf.urls import patterns, url
from s3direct.views import get_upload_params
urlpatterns = patterns('',
url('^get_upload_params/',
get_upload_params, name='s3direct'),
)
<commit_msg>Update urlpatterns to use list of url()<commit_after>from django.conf.urls import url
from s3direct.views import get_upload_params
urlpatterns = [
url('^get_upload_params/', get_upload_params, name='s3direct')
]
|
cff73bbff666745a72a8ffc6750c33aebb80fa4b | feature_extraction.py | feature_extraction.py | from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_mask_labels():
mask_labels = []
for image in _get_masks():
mask_labels.append((image.filename, 255 in image.getdata()))
return mask_labels
| import re
import glob
from PIL import Image
from tqdm import tqdm
def _get_file_root_name(file_path):
file_root_name_expression = re.compile(r'/[^/]*\.', re.IGNORECASE)
return file_root_name_expression.search(file_path).group(0)[1:-1]
def _get_feature_label_images():
TRAIN_FILES = './data/train/*.tif'
mask_expression = re.compile(r'_mask\.tif', re.IGNORECASE)
features = {}
labels = {}
image_paths = glob.glob(TRAIN_FILES)
with tqdm(desc='Reading Images from Disk', total=len(image_paths), unit='image') as progress_bar:
for file_name in image_paths:
progress_bar.update()
with Image.open(file_name) as image:
root_name = _get_file_root_name(file_name)
if mask_expression.search(file_name):
labels[root_name[:-5]] = image.getdata()
else:
features[root_name] = image.getdata()
assert len(features) == len(labels)
return [(features[root_name], labels[root_name]) for root_name, label in labels.items()]
def get_detection_data():
features = []
labels = []
features_labels = _get_feature_label_images()
with tqdm(desc='Extracting Features', total=len(features_labels), unit='image') as progress_bar:
for feature, label in features_labels:
progress_bar.update()
features.append(list(feature)[:784])
extracted_label = [1.0, 0.0] if 255 in label else [0.0, 1.0]
labels.append(extracted_label)
return features, labels
| Return all data instead of just mask data | Return all data instead of just mask data
| Python | mit | Brok-Bucholtz/Ultrasound-Nerve-Segmentation | from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_mask_labels():
mask_labels = []
for image in _get_masks():
mask_labels.append((image.filename, 255 in image.getdata()))
return mask_labels
Return all data instead of just mask data | import re
import glob
from PIL import Image
from tqdm import tqdm
def _get_file_root_name(file_path):
file_root_name_expression = re.compile(r'/[^/]*\.', re.IGNORECASE)
return file_root_name_expression.search(file_path).group(0)[1:-1]
def _get_feature_label_images():
TRAIN_FILES = './data/train/*.tif'
mask_expression = re.compile(r'_mask\.tif', re.IGNORECASE)
features = {}
labels = {}
image_paths = glob.glob(TRAIN_FILES)
with tqdm(desc='Reading Images from Disk', total=len(image_paths), unit='image') as progress_bar:
for file_name in image_paths:
progress_bar.update()
with Image.open(file_name) as image:
root_name = _get_file_root_name(file_name)
if mask_expression.search(file_name):
labels[root_name[:-5]] = image.getdata()
else:
features[root_name] = image.getdata()
assert len(features) == len(labels)
return [(features[root_name], labels[root_name]) for root_name, label in labels.items()]
def get_detection_data():
features = []
labels = []
features_labels = _get_feature_label_images()
with tqdm(desc='Extracting Features', total=len(features_labels), unit='image') as progress_bar:
for feature, label in features_labels:
progress_bar.update()
features.append(list(feature)[:784])
extracted_label = [1.0, 0.0] if 255 in label else [0.0, 1.0]
labels.append(extracted_label)
return features, labels
| <commit_before>from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_mask_labels():
mask_labels = []
for image in _get_masks():
mask_labels.append((image.filename, 255 in image.getdata()))
return mask_labels
<commit_msg>Return all data instead of just mask data<commit_after> | import re
import glob
from PIL import Image
from tqdm import tqdm
def _get_file_root_name(file_path):
file_root_name_expression = re.compile(r'/[^/]*\.', re.IGNORECASE)
return file_root_name_expression.search(file_path).group(0)[1:-1]
def _get_feature_label_images():
TRAIN_FILES = './data/train/*.tif'
mask_expression = re.compile(r'_mask\.tif', re.IGNORECASE)
features = {}
labels = {}
image_paths = glob.glob(TRAIN_FILES)
with tqdm(desc='Reading Images from Disk', total=len(image_paths), unit='image') as progress_bar:
for file_name in image_paths:
progress_bar.update()
with Image.open(file_name) as image:
root_name = _get_file_root_name(file_name)
if mask_expression.search(file_name):
labels[root_name[:-5]] = image.getdata()
else:
features[root_name] = image.getdata()
assert len(features) == len(labels)
return [(features[root_name], labels[root_name]) for root_name, label in labels.items()]
def get_detection_data():
features = []
labels = []
features_labels = _get_feature_label_images()
with tqdm(desc='Extracting Features', total=len(features_labels), unit='image') as progress_bar:
for feature, label in features_labels:
progress_bar.update()
features.append(list(feature)[:784])
extracted_label = [1.0, 0.0] if 255 in label else [0.0, 1.0]
labels.append(extracted_label)
return features, labels
| from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_mask_labels():
mask_labels = []
for image in _get_masks():
mask_labels.append((image.filename, 255 in image.getdata()))
return mask_labels
Return all data instead of just mask dataimport re
import glob
from PIL import Image
from tqdm import tqdm
def _get_file_root_name(file_path):
file_root_name_expression = re.compile(r'/[^/]*\.', re.IGNORECASE)
return file_root_name_expression.search(file_path).group(0)[1:-1]
def _get_feature_label_images():
TRAIN_FILES = './data/train/*.tif'
mask_expression = re.compile(r'_mask\.tif', re.IGNORECASE)
features = {}
labels = {}
image_paths = glob.glob(TRAIN_FILES)
with tqdm(desc='Reading Images from Disk', total=len(image_paths), unit='image') as progress_bar:
for file_name in image_paths:
progress_bar.update()
with Image.open(file_name) as image:
root_name = _get_file_root_name(file_name)
if mask_expression.search(file_name):
labels[root_name[:-5]] = image.getdata()
else:
features[root_name] = image.getdata()
assert len(features) == len(labels)
return [(features[root_name], labels[root_name]) for root_name, label in labels.items()]
def get_detection_data():
features = []
labels = []
features_labels = _get_feature_label_images()
with tqdm(desc='Extracting Features', total=len(features_labels), unit='image') as progress_bar:
for feature, label in features_labels:
progress_bar.update()
features.append(list(feature)[:784])
extracted_label = [1.0, 0.0] if 255 in label else [0.0, 1.0]
labels.append(extracted_label)
return features, labels
| <commit_before>from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_mask_labels():
mask_labels = []
for image in _get_masks():
mask_labels.append((image.filename, 255 in image.getdata()))
return mask_labels
<commit_msg>Return all data instead of just mask data<commit_after>import re
import glob
from PIL import Image
from tqdm import tqdm
def _get_file_root_name(file_path):
file_root_name_expression = re.compile(r'/[^/]*\.', re.IGNORECASE)
return file_root_name_expression.search(file_path).group(0)[1:-1]
def _get_feature_label_images():
TRAIN_FILES = './data/train/*.tif'
mask_expression = re.compile(r'_mask\.tif', re.IGNORECASE)
features = {}
labels = {}
image_paths = glob.glob(TRAIN_FILES)
with tqdm(desc='Reading Images from Disk', total=len(image_paths), unit='image') as progress_bar:
for file_name in image_paths:
progress_bar.update()
with Image.open(file_name) as image:
root_name = _get_file_root_name(file_name)
if mask_expression.search(file_name):
labels[root_name[:-5]] = image.getdata()
else:
features[root_name] = image.getdata()
assert len(features) == len(labels)
return [(features[root_name], labels[root_name]) for root_name, label in labels.items()]
def get_detection_data():
features = []
labels = []
features_labels = _get_feature_label_images()
with tqdm(desc='Extracting Features', total=len(features_labels), unit='image') as progress_bar:
for feature, label in features_labels:
progress_bar.update()
features.append(list(feature)[:784])
extracted_label = [1.0, 0.0] if 255 in label else [0.0, 1.0]
labels.append(extracted_label)
return features, labels
|
0cf60c650b81d2d396d673e4256be19a5193774e | app/main/helpers/s3.py | app/main/helpers/s3.py | import os
import boto
import datetime
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
key.set_contents_from_file(file)
key.set_acl(acl)
| import os
import boto
import datetime
import mimetypes
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
mimetype, _ = mimetypes.guess_type(key.name)
key.set_contents_from_file(file, headers={'Content-Type': mimetype})
key.set_acl(acl)
| Set Content-Type header for S3 uploads | Set Content-Type header for S3 uploads
Current document updates do not preserve the content type - it is reset to "application/octet-stream"
We should set the correct content type so that browsers know what to do when users access the documents.
| Python | mit | alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend | import os
import boto
import datetime
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
key.set_contents_from_file(file)
key.set_acl(acl)
Set Content-Type header for S3 uploads
Current document updates do not preserve the content type - it is reset to "application/octet-stream"
We should set the correct content type so that browsers know what to do when users access the documents. | import os
import boto
import datetime
import mimetypes
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
mimetype, _ = mimetypes.guess_type(key.name)
key.set_contents_from_file(file, headers={'Content-Type': mimetype})
key.set_acl(acl)
| <commit_before>import os
import boto
import datetime
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
key.set_contents_from_file(file)
key.set_acl(acl)
<commit_msg>Set Content-Type header for S3 uploads
Current document updates do not preserve the content type - it is reset to "application/octet-stream"
We should set the correct content type so that browsers know what to do when users access the documents.<commit_after> | import os
import boto
import datetime
import mimetypes
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
mimetype, _ = mimetypes.guess_type(key.name)
key.set_contents_from_file(file, headers={'Content-Type': mimetype})
key.set_acl(acl)
| import os
import boto
import datetime
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
key.set_contents_from_file(file)
key.set_acl(acl)
Set Content-Type header for S3 uploads
Current document updates do not preserve the content type - it is reset to "application/octet-stream"
We should set the correct content type so that browsers know what to do when users access the documents.import os
import boto
import datetime
import mimetypes
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
mimetype, _ = mimetypes.guess_type(key.name)
key.set_contents_from_file(file, headers={'Content-Type': mimetype})
key.set_acl(acl)
| <commit_before>import os
import boto
import datetime
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
key.set_contents_from_file(file)
key.set_acl(acl)
<commit_msg>Set Content-Type header for S3 uploads
Current document updates do not preserve the content type - it is reset to "application/octet-stream"
We should set the correct content type so that browsers know what to do when users access the documents.<commit_after>import os
import boto
import datetime
import mimetypes
class S3(object):
def __init__(self, bucket_name=None, host='s3-eu-west-1.amazonaws.com'):
conn = boto.connect_s3(host=host)
self.bucket_name = bucket_name
self.bucket = conn.get_bucket(bucket_name)
def save(self, path, name, file, acl='public-read'):
timestamp = datetime.datetime.utcnow().isoformat()
full_path = os.path.join(path, name)
if self.bucket.get_key(full_path):
self.bucket.copy_key(
os.path.join(path, '{}-{}'.format(timestamp, name)),
self.bucket_name,
full_path
)
key = self.bucket.new_key(full_path)
mimetype, _ = mimetypes.guess_type(key.name)
key.set_contents_from_file(file, headers={'Content-Type': mimetype})
key.set_acl(acl)
|
8c5aca4b9957e883a9dab8c95933de7285ab335b | login/middleware.py | login/middleware.py | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (request.path.startswith(ACTIVATE_PATH)
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) | Revert trying to fix activation redirection bug | Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it.
| Python | agpl-3.0 | openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH)Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it. | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (request.path.startswith(ACTIVATE_PATH)
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) | <commit_before>from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH)<commit_msg>Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it.<commit_after> | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (request.path.startswith(ACTIVATE_PATH)
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) | from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH)Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it.from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (request.path.startswith(ACTIVATE_PATH)
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) | <commit_before>from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (ACTIVATE_PATH in request.path
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH)<commit_msg>Revert trying to fix activation redirection bug
This reverts commit c2d63335062abea4cece32bd01132bcf8dce44f2.
It seems like the commit doesn't actually do anything to alleviate the
bug. Since it's also more lenient with its checks, I'll rather revert
it.<commit_after>from django.conf import settings
from django.http import HttpResponseRedirect
DETACH_PATH = '/user/detach'
ACTIVATE_PATH = '/user/activate'
class DetachMiddleware(object):
def process_request(self, request):
if not request.path == '/login/' \
and not request.path.startswith('/api') \
and not request.user.is_anonymous:
if not request.user.is_native:
if not (request.path == DETACH_PATH
or request.path.startswith('/logout')):
return HttpResponseRedirect(DETACH_PATH)
elif not request.user.is_mail_verified \
and not (request.path.startswith(ACTIVATE_PATH)
or request.path.startswith('/logout')):
return HttpResponseRedirect(ACTIVATE_PATH) |
7d862be1aba5a062eeaf54ada9587278e7e93f5b | apps/provider/urls.py | apps/provider/urls.py | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
) | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
| Change fhir practitioner url and add organization url | Change fhir practitioner url and add organization url
| Python | apache-2.0 | TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client,TransparentHealth/hhs_oauth_client | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)Change fhir practitioner url and add organization url | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)<commit_msg>Change fhir practitioner url and add organization url<commit_after> | from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
| from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)Change fhir practitioner url and add organization urlfrom __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
)<commit_msg>Change fhir practitioner url and add organization url<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
from django.conf.urls import patterns, include, url
from .views import *
urlpatterns = patterns('',
url(r'^pjson/push$', pjson_provider_push, name="pjson_provider_push"),
url(r'^fhir/practioner/push$', fhir_practitioner_push, name="fhir_practitioner_push"),
url(r'^fhir/organization/push$', fhir_organization_push, name="fhir_organization_push"),
)
|
9771428d7b0c4a2c0fe057e1030024b13344ccc7 | moa/device/__init__.py | moa/device/__init__.py |
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
|
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
| Clean the base device class. | Clean the base device class.
| Python | mit | matham/moa |
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
Clean the base device class. |
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
| <commit_before>
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
<commit_msg>Clean the base device class.<commit_after> |
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
|
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
Clean the base device class.
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
| <commit_before>
from moa.threading import CallbackQueue
from moa.base import MoaBase
from kivy.properties import BooleanProperty
from kivy.clock import Clock
try:
from Queue import Queue
except ImportError:
from queue import Queue
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
__events__ = ('on_restart', )
active = BooleanProperty(False)
_kivy_eventloop_queue = None
def __init__(self, allow_async=True, **kwargs):
super(Device, self).__init__(**kwargs)
if allow_async:
trigger = Clock.create_trigger(self._do_queue)
self._kivy_eventloop_queue = CallbackQueue(trigger)
def __del__(self):
self.deinit()
def _do_queue(self, *largs, **kwargs):
while 1:
try:
key, val = self._kivy_eventloop_queue.get()
except Queue.Empty:
return
if key == 'set':
setattr(*val)
elif key == 'call':
f, l, kw = val
f(*l, **kw)
def init(self, **kwargs):
pass
def restart(self, **kwargs):
pass
def on_restart(self, **kwargs):
pass
def deinit(self, **kwargs):
pass
<commit_msg>Clean the base device class.<commit_after>
from moa.base import MoaBase
class Device(MoaBase):
''' By default, the device does not support multi-threading.
'''
def activate(self, **kwargs):
pass
def recover(self, **kwargs):
pass
def deactivate(self, **kwargs):
pass
|
ff19efc1b5a51bc4b29c82d32bfe066661dbadca | sonnet/src/conformance/api_test.py | sonnet/src/conformance/api_test.py | # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import six
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
if six.PY2:
mysnt = reload(mysnt)
else:
mysnt = importlib.reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
| # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import reload_module as reload
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
mysnt = reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
| Use six.moves to reference `reload`. | Use six.moves to reference `reload`.
PiperOrigin-RevId: 253199825
Change-Id: Ifb9bf182572900a813ea1b0dbbda60f82495eac1
| Python | apache-2.0 | deepmind/sonnet,deepmind/sonnet | # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import six
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
if six.PY2:
mysnt = reload(mysnt)
else:
mysnt = importlib.reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
Use six.moves to reference `reload`.
PiperOrigin-RevId: 253199825
Change-Id: Ifb9bf182572900a813ea1b0dbbda60f82495eac1 | # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import reload_module as reload
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
mysnt = reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
| <commit_before># Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import six
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
if six.PY2:
mysnt = reload(mysnt)
else:
mysnt = importlib.reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
<commit_msg>Use six.moves to reference `reload`.
PiperOrigin-RevId: 253199825
Change-Id: Ifb9bf182572900a813ea1b0dbbda60f82495eac1<commit_after> | # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import reload_module as reload
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
mysnt = reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
| # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import six
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
if six.PY2:
mysnt = reload(mysnt)
else:
mysnt = importlib.reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
Use six.moves to reference `reload`.
PiperOrigin-RevId: 253199825
Change-Id: Ifb9bf182572900a813ea1b0dbbda60f82495eac1# Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import reload_module as reload
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
mysnt = reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
| <commit_before># Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import six
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
if six.PY2:
mysnt = reload(mysnt)
else:
mysnt = importlib.reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
<commit_msg>Use six.moves to reference `reload`.
PiperOrigin-RevId: 253199825
Change-Id: Ifb9bf182572900a813ea1b0dbbda60f82495eac1<commit_after># Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for Sonnet's public API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import reload_module as reload
import sonnet as snt
from sonnet.src import test_utils
import tensorflow as tf
class PublicSymbolsTest(test_utils.TestCase):
def test_src_not_exported(self):
self.assertFalse(hasattr(snt, "src"))
def test_supports_reload(self):
mysnt = snt
for _ in range(2):
mysnt = reload(mysnt)
self.assertFalse(hasattr(mysnt, "src"))
if __name__ == "__main__":
# tf.enable_v2_behavior()
tf.test.main()
|
06907e310169db7084f9e40f93e60182ba6e6423 | python/animationBase.py | python/animationBase.py | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
ball = Ball(5, 9, 4)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
ball.updateValues(1 / 60.0)
ball.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
red = Ball(5, 9, 4)
blue = Ball(6, 9, 2, 2, 0, 0, 255)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
red.updateValues(1 / 60.0)
blue.updateValues(1 / 60.0)
red.drawOnMatrix(nextFrame)
blue.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) | Add two balls to animation | Add two balls to animation
| Python | mit | DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
ball = Ball(5, 9, 4)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
ball.updateValues(1 / 60.0)
ball.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0)Add two balls to animation | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
red = Ball(5, 9, 4)
blue = Ball(6, 9, 2, 2, 0, 0, 255)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
red.updateValues(1 / 60.0)
blue.updateValues(1 / 60.0)
red.drawOnMatrix(nextFrame)
blue.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) | <commit_before>#!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
ball = Ball(5, 9, 4)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
ball.updateValues(1 / 60.0)
ball.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0)<commit_msg>Add two balls to animation<commit_after> | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
red = Ball(5, 9, 4)
blue = Ball(6, 9, 2, 2, 0, 0, 255)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
red.updateValues(1 / 60.0)
blue.updateValues(1 / 60.0)
red.drawOnMatrix(nextFrame)
blue.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
ball = Ball(5, 9, 4)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
ball.updateValues(1 / 60.0)
ball.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0)Add two balls to animation#!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
red = Ball(5, 9, 4)
blue = Ball(6, 9, 2, 2, 0, 0, 255)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
red.updateValues(1 / 60.0)
blue.updateValues(1 / 60.0)
red.drawOnMatrix(nextFrame)
blue.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) | <commit_before>#!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
ball = Ball(5, 9, 4)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
ball.updateValues(1 / 60.0)
ball.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0)<commit_msg>Add two balls to animation<commit_after>#!/usr/bin/env python
from rgbmatrix import RGBMatrix
import sys, time
from ball import Ball
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
numRows = 16
height = ledMatrix.height
width = ledMatrix.width
red = Ball(5, 9, 4)
blue = Ball(6, 9, 2, 2, 0, 0, 255)
try:
print "Press Ctrl + C to stop executing"
while True:
nextFrame = ledMatrix.CreateFrameCanvas()
red.updateValues(1 / 60.0)
blue.updateValues(1 / 60.0)
red.drawOnMatrix(nextFrame)
blue.drawOnMatrix(nextFrame)
ledMatrix.SwapOnVSync(nextFrame)
time.sleep(1 / 60.0)
except KeyboardInterrupt:
print "Exiting\n"
sys.exit(0) |
1adc660916eafe5937b96f1b5bc480185efc96ad | aospy_user/__init__.py | aospy_user/__init__.py | """aospy_user: Library of user-defined aospy objects."""
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
| """aospy_user: Library of user-defined aospy objects."""
from aospy import (LAT_STR, LON_STR, PHALF_STR, PFULL_STR, PLEVEL_STR,
TIME_STR, TIME_STR_IDEALIZED)
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
| Use aospy coord label constants | Use aospy coord label constants
| Python | apache-2.0 | spencerahill/aospy-obj-lib | """aospy_user: Library of user-defined aospy objects."""
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
Use aospy coord label constants | """aospy_user: Library of user-defined aospy objects."""
from aospy import (LAT_STR, LON_STR, PHALF_STR, PFULL_STR, PLEVEL_STR,
TIME_STR, TIME_STR_IDEALIZED)
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
| <commit_before>"""aospy_user: Library of user-defined aospy objects."""
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
<commit_msg>Use aospy coord label constants<commit_after> | """aospy_user: Library of user-defined aospy objects."""
from aospy import (LAT_STR, LON_STR, PHALF_STR, PFULL_STR, PLEVEL_STR,
TIME_STR, TIME_STR_IDEALIZED)
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
| """aospy_user: Library of user-defined aospy objects."""
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
Use aospy coord label constants"""aospy_user: Library of user-defined aospy objects."""
from aospy import (LAT_STR, LON_STR, PHALF_STR, PFULL_STR, PLEVEL_STR,
TIME_STR, TIME_STR_IDEALIZED)
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
| <commit_before>"""aospy_user: Library of user-defined aospy objects."""
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
<commit_msg>Use aospy coord label constants<commit_after>"""aospy_user: Library of user-defined aospy objects."""
from aospy import (LAT_STR, LON_STR, PHALF_STR, PFULL_STR, PLEVEL_STR,
TIME_STR, TIME_STR_IDEALIZED)
from . import regions
from . import units
from . import calcs
from . import variables
from . import runs
from . import models
from . import projs
from . import obj_from_name
from .obj_from_name import (to_proj, to_model, to_run, to_var, to_region,
to_iterable)
|
d739767df47d5fc7424ab40485cba18ab5e137b2 | integration_tests/tests/__init__.py | integration_tests/tests/__init__.py | import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
logging.basicConfig()
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
| import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
| Remove unnecessary logger configuration from integration tests. | [antenna] Remove unnecessary logger configuration from integration tests.
| Python | agpl-3.0 | PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC,PW-Sat2/PWSat2OBC | import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
logging.basicConfig()
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
[antenna] Remove unnecessary logger configuration from integration tests. | import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
| <commit_before>import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
logging.basicConfig()
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
<commit_msg>[antenna] Remove unnecessary logger configuration from integration tests.<commit_after> | import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
| import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
logging.basicConfig()
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
[antenna] Remove unnecessary logger configuration from integration tests.import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
| <commit_before>import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
logging.basicConfig()
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
<commit_msg>[antenna] Remove unnecessary logger configuration from integration tests.<commit_after>import logging
import os
import colorlog
import extensions
if os.getenv("CLICOLOR_FORCE") == "1":
print "Forcing colors"
import colorama
colorama.deinit()
def _setup_log():
root_logger = logging.getLogger()
handler = colorlog.StreamHandler()
formatter = colorlog.ColoredFormatter(
"%(log_color)s%(asctime)-15s %(levelname)s: [%(name)s] %(message)s",
log_colors={
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red,bg_white',
}
)
handler.setFormatter(formatter)
root_logger.addHandler(handler)
root_logger.setLevel(logging.DEBUG)
def setup():
_setup_log()
extensions.initialize_extensions()
extensions.set_up_once()
def tearDown():
extensions.tear_down_once()
|
75a9c9b870102b7864e160988e011b01bb231ed9 | randterrainpy/terraindisplay.py | randterrainpy/terraindisplay.py | """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
| """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
"""Display a Terrain in 2D.
Args:
terrain (Terrain): Terrain to display.
"""
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
"""Make self child of a TK parent, then initialize own UI.
Args:
parent (TK): Parent to attach self to.
terrain (Terrain): Terrain to display.
"""
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
"""Initialize UI of window."""
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
self.draw_heights()
def draw_heights(self):
"""Draw grid of height values on window.
Heights are shown as squares, with greyscale colors becoming brighter for greater heights.
"""
canvas = Canvas(self)
# TODO: add code to draw squares here
canvas.pack(fill=BOTH, expand=1)
| Add empty draw_heights method and docstrings to Terrain2D | Add empty draw_heights method and docstrings to Terrain2D
| Python | mit | jackromo/RandTerrainPy | """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
Add empty draw_heights method and docstrings to Terrain2D | """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
"""Display a Terrain in 2D.
Args:
terrain (Terrain): Terrain to display.
"""
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
"""Make self child of a TK parent, then initialize own UI.
Args:
parent (TK): Parent to attach self to.
terrain (Terrain): Terrain to display.
"""
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
"""Initialize UI of window."""
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
self.draw_heights()
def draw_heights(self):
"""Draw grid of height values on window.
Heights are shown as squares, with greyscale colors becoming brighter for greater heights.
"""
canvas = Canvas(self)
# TODO: add code to draw squares here
canvas.pack(fill=BOTH, expand=1)
| <commit_before>"""Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
<commit_msg>Add empty draw_heights method and docstrings to Terrain2D<commit_after> | """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
"""Display a Terrain in 2D.
Args:
terrain (Terrain): Terrain to display.
"""
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
"""Make self child of a TK parent, then initialize own UI.
Args:
parent (TK): Parent to attach self to.
terrain (Terrain): Terrain to display.
"""
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
"""Initialize UI of window."""
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
self.draw_heights()
def draw_heights(self):
"""Draw grid of height values on window.
Heights are shown as squares, with greyscale colors becoming brighter for greater heights.
"""
canvas = Canvas(self)
# TODO: add code to draw squares here
canvas.pack(fill=BOTH, expand=1)
| """Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
Add empty draw_heights method and docstrings to Terrain2D"""Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
"""Display a Terrain in 2D.
Args:
terrain (Terrain): Terrain to display.
"""
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
"""Make self child of a TK parent, then initialize own UI.
Args:
parent (TK): Parent to attach self to.
terrain (Terrain): Terrain to display.
"""
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
"""Initialize UI of window."""
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
self.draw_heights()
def draw_heights(self):
"""Draw grid of height values on window.
Heights are shown as squares, with greyscale colors becoming brighter for greater heights.
"""
canvas = Canvas(self)
# TODO: add code to draw squares here
canvas.pack(fill=BOTH, expand=1)
| <commit_before>"""Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
<commit_msg>Add empty draw_heights method and docstrings to Terrain2D<commit_after>"""Module for displaying Terrain, both in 2D and 3D.
(Not accessible outside of package; use display methods of Terrain instead.)
"""
from Tkinter import Tk, Canvas, Frame, BOTH
class Terrain2D(Frame):
"""2D graphical representation of a Terrain object.
Consists of a 2D top-down image of terrain as a grid of greyscale squares.
Each square corresponds to a height value, being on a scale from white if 1 to black if 0.
"""
DIMENSIONS = "250x150"
"""Dimensions of the window for a Terrain2D."""
@classmethod
def display_terrain(cls, terrain):
"""Display a Terrain in 2D.
Args:
terrain (Terrain): Terrain to display.
"""
root = Tk()
root.geometry(Terrain2D.DIMENSIONS)
app = Terrain2D(root, terrain)
root.mainloop()
def __init__(self, parent, terrain):
"""Make self child of a TK parent, then initialize own UI.
Args:
parent (TK): Parent to attach self to.
terrain (Terrain): Terrain to display.
"""
Frame.__init__(self, parent)
self.terrain = terrain
self.parent = parent
self.init_ui()
def init_ui(self):
"""Initialize UI of window."""
self.parent.title("Terrain (top-down)")
self.pack(fill=BOTH, expand=1)
self.draw_heights()
def draw_heights(self):
"""Draw grid of height values on window.
Heights are shown as squares, with greyscale colors becoming brighter for greater heights.
"""
canvas = Canvas(self)
# TODO: add code to draw squares here
canvas.pack(fill=BOTH, expand=1)
|
721f837cbfa0de8804def607908a9744b0d099a8 | asl/vendor/__init__.py | asl/vendor/__init__.py | import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = "."
if os.path.exists('./asl/asl/vendor'):
path = os.path.abspath('./asl/asl/vendor')
else:
for p in sys.path:
if os.path.exists(p + '/vendor/'):
path = os.path.abspath(p + '/vendor/')
break
if os.path.exists(p + '/asl/vendor/'):
path = os.path.abspath(p + '/asl/vendor/')
break
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
| import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = os.path.dirname(__file__)
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
| Fix of vendor directory search. | Fix of vendor directory search.
| Python | mit | AtteqCom/zsl,AtteqCom/zsl | import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = "."
if os.path.exists('./asl/asl/vendor'):
path = os.path.abspath('./asl/asl/vendor')
else:
for p in sys.path:
if os.path.exists(p + '/vendor/'):
path = os.path.abspath(p + '/vendor/')
break
if os.path.exists(p + '/asl/vendor/'):
path = os.path.abspath(p + '/asl/vendor/')
break
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
Fix of vendor directory search. | import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = os.path.dirname(__file__)
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
| <commit_before>import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = "."
if os.path.exists('./asl/asl/vendor'):
path = os.path.abspath('./asl/asl/vendor')
else:
for p in sys.path:
if os.path.exists(p + '/vendor/'):
path = os.path.abspath(p + '/vendor/')
break
if os.path.exists(p + '/asl/vendor/'):
path = os.path.abspath(p + '/asl/vendor/')
break
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
<commit_msg>Fix of vendor directory search.<commit_after> | import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = os.path.dirname(__file__)
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
| import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = "."
if os.path.exists('./asl/asl/vendor'):
path = os.path.abspath('./asl/asl/vendor')
else:
for p in sys.path:
if os.path.exists(p + '/vendor/'):
path = os.path.abspath(p + '/vendor/')
break
if os.path.exists(p + '/asl/vendor/'):
path = os.path.abspath(p + '/asl/vendor/')
break
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
Fix of vendor directory search.import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = os.path.dirname(__file__)
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
| <commit_before>import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = "."
if os.path.exists('./asl/asl/vendor'):
path = os.path.abspath('./asl/asl/vendor')
else:
for p in sys.path:
if os.path.exists(p + '/vendor/'):
path = os.path.abspath(p + '/vendor/')
break
if os.path.exists(p + '/asl/vendor/'):
path = os.path.abspath(p + '/asl/vendor/')
break
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
<commit_msg>Fix of vendor directory search.<commit_after>import sys
import os
_vendor_initialized = False
def append_paths(path, vendor_modules):
new_path = []
for v in vendor_modules:
new_path.append(path + os.sep + v)
sys.path = new_path + sys.path
def do_init():
global _vendor_initialized
if _vendor_initialized:
return
_vendor_initialized = True
path = os.path.dirname(__file__)
vendor_modules = ['injector', 'flask_injector', 'redis-py', 'sqlalchemy/sqlalchemy-0_9_1/lib', 'bpython/bpython', 'sphinxapi', 'simplejson']
append_paths(path, vendor_modules)
do_init()
|
75ce7463218609129151ea96fae4590763165961 | array/quick-sort.py | array/quick-sort.py | # Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
| # Sort list using recursion
def quick_sort(lst):
if len(lst) <= 1:
return lst
left = []
right = []
# compare first element in list to the rest
for i in lst[1:]:
if lst[i] < lst[0]:
left.append(i)
else:
right.append(i)
# recursion
return quick_sort(left) + lst[0:1] + quick_sort(right)
# test case
print quick_sort([5, 4, 3, 2, 1]) # [1, 2, 3, 4, 5]
| Debug and add test case | Debug and add test case
| Python | mit | derekmpham/interview-prep,derekmpham/interview-prep | # Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
Debug and add test case | # Sort list using recursion
def quick_sort(lst):
if len(lst) <= 1:
return lst
left = []
right = []
# compare first element in list to the rest
for i in lst[1:]:
if lst[i] < lst[0]:
left.append(i)
else:
right.append(i)
# recursion
return quick_sort(left) + lst[0:1] + quick_sort(right)
# test case
print quick_sort([5, 4, 3, 2, 1]) # [1, 2, 3, 4, 5]
| <commit_before># Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
<commit_msg>Debug and add test case<commit_after> | # Sort list using recursion
def quick_sort(lst):
if len(lst) <= 1:
return lst
left = []
right = []
# compare first element in list to the rest
for i in lst[1:]:
if lst[i] < lst[0]:
left.append(i)
else:
right.append(i)
# recursion
return quick_sort(left) + lst[0:1] + quick_sort(right)
# test case
print quick_sort([5, 4, 3, 2, 1]) # [1, 2, 3, 4, 5]
| # Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
Debug and add test case# Sort list using recursion
def quick_sort(lst):
if len(lst) <= 1:
return lst
left = []
right = []
# compare first element in list to the rest
for i in lst[1:]:
if lst[i] < lst[0]:
left.append(i)
else:
right.append(i)
# recursion
return quick_sort(left) + lst[0:1] + quick_sort(right)
# test case
print quick_sort([5, 4, 3, 2, 1]) # [1, 2, 3, 4, 5]
| <commit_before># Sort list using recursion
def quick_sort(lst):
if len(lst) == 0:
print []
left = []
right = []
pivot = lst[0]
# compare first element in list to the rest
for i in range(1, len(lst)):
if lst[i] < pivot:
left.append(lst[i])
else:
right.append(lst[i])
# recursion
print quick_sort(left) + pivot + quick_sort(right)
<commit_msg>Debug and add test case<commit_after># Sort list using recursion
def quick_sort(lst):
if len(lst) <= 1:
return lst
left = []
right = []
# compare first element in list to the rest
for i in lst[1:]:
if lst[i] < lst[0]:
left.append(i)
else:
right.append(i)
# recursion
return quick_sort(left) + lst[0:1] + quick_sort(right)
# test case
print quick_sort([5, 4, 3, 2, 1]) # [1, 2, 3, 4, 5]
|
d7219365197ff22aec44836e37af19f62420f996 | paystackapi/tests/test_tcontrol.py | paystackapi/tests/test_tcontrol.py | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
| import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
@httpretty.activate
def test_resend_otp(self):
"""Method defined to test resend_otp."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/transfer/resend_otp"),
content_type='text/json',
body='{"status": true, "message": "OTP has been resent"}',
status=201,
)
response = TransferControl.resend_otp(
transfer_code="TRF_vsyqdmlzble3uii",
reason="Just do it."
)
self.assertTrue(response['status'])
| Add test for transfer control resend otp | Add test for transfer control resend otp
| Python | mit | andela-sjames/paystack-python | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
Add test for transfer control resend otp | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
@httpretty.activate
def test_resend_otp(self):
"""Method defined to test resend_otp."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/transfer/resend_otp"),
content_type='text/json',
body='{"status": true, "message": "OTP has been resent"}',
status=201,
)
response = TransferControl.resend_otp(
transfer_code="TRF_vsyqdmlzble3uii",
reason="Just do it."
)
self.assertTrue(response['status'])
| <commit_before>import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
<commit_msg>Add test for transfer control resend otp<commit_after> | import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
@httpretty.activate
def test_resend_otp(self):
"""Method defined to test resend_otp."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/transfer/resend_otp"),
content_type='text/json',
body='{"status": true, "message": "OTP has been resent"}',
status=201,
)
response = TransferControl.resend_otp(
transfer_code="TRF_vsyqdmlzble3uii",
reason="Just do it."
)
self.assertTrue(response['status'])
| import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
Add test for transfer control resend otpimport httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
@httpretty.activate
def test_resend_otp(self):
"""Method defined to test resend_otp."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/transfer/resend_otp"),
content_type='text/json',
body='{"status": true, "message": "OTP has been resent"}',
status=201,
)
response = TransferControl.resend_otp(
transfer_code="TRF_vsyqdmlzble3uii",
reason="Just do it."
)
self.assertTrue(response['status'])
| <commit_before>import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
<commit_msg>Add test for transfer control resend otp<commit_after>import httpretty
from paystackapi.tests.base_test_case import BaseTestCase
from paystackapi.tcontrol import TransferControl
class TestTransfer(BaseTestCase):
@httpretty.activate
def test_check_balance(self):
"""Method defined to test check_balance."""
httpretty.register_uri(
httpretty.GET,
self.endpoint_url("/balance"),
content_type='text/json',
body='{"status": true, "message": "Balances retrieved"}',
status=201,
)
response = TransferControl.check_balance()
self.assertTrue(response['status'])
@httpretty.activate
def test_resend_otp(self):
"""Method defined to test resend_otp."""
httpretty.register_uri(
httpretty.POST,
self.endpoint_url("/transfer/resend_otp"),
content_type='text/json',
body='{"status": true, "message": "OTP has been resent"}',
status=201,
)
response = TransferControl.resend_otp(
transfer_code="TRF_vsyqdmlzble3uii",
reason="Just do it."
)
self.assertTrue(response['status'])
|
b682addf7d65dbb48ad5c0a3506987103ea43835 | miura/morph.py | miura/morph.py | import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature.split(',')[0],
'feature': feature,
})
node = node.next
return morphs
| import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature[0:feature.find(',')],
'feature': feature,
})
node = node.next
return morphs
| Use find instead of split | Use find instead of split
| Python | mit | unnonouno/mrep | import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature.split(',')[0],
'feature': feature,
})
node = node.next
return morphs
Use find instead of split | import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature[0:feature.find(',')],
'feature': feature,
})
node = node.next
return morphs
| <commit_before>import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature.split(',')[0],
'feature': feature,
})
node = node.next
return morphs
<commit_msg>Use find instead of split<commit_after> | import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature[0:feature.find(',')],
'feature': feature,
})
node = node.next
return morphs
| import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature.split(',')[0],
'feature': feature,
})
node = node.next
return morphs
Use find instead of splitimport MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature[0:feature.find(',')],
'feature': feature,
})
node = node.next
return morphs
| <commit_before>import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature.split(',')[0],
'feature': feature,
})
node = node.next
return morphs
<commit_msg>Use find instead of split<commit_after>import MeCab
class MeCabParser(object):
def __init__(self, arg=''):
self.model = MeCab.Model_create(arg)
def parse(self, s):
tagger = self.model.createTagger()
lattice = self.model.createLattice()
lattice.set_sentence(s)
tagger.parse(lattice)
node = lattice.bos_node()
morphs = []
while node:
if node.surface != '':
feature = node.feature
morphs.append({
'surface': node.surface,
'pos': feature[0:feature.find(',')],
'feature': feature,
})
node = node.next
return morphs
|
fce1a7e5c1466ef79c5387f4b9f0c231e745f380 | basics/candidate.py | basics/candidate.py |
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return [self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
|
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return np.array([self._y, self._x, self._major,
self._minor, self._pa])
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profile_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def find_shell_fraction(self, array, frac_thresh=0.05, grad_thresh=1,
**kwargs):
'''
Find the fraction of the bubble edge associated with a shell.
'''
shell_frac = 0
for prof in self.profiles_lines(array, **kwargs):
pass
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
| Return as array; start of finding shell fraction | Return as array; start of finding shell fraction
| Python | mit | e-koch/BaSiCs |
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return [self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
Return as array; start of finding shell fraction |
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return np.array([self._y, self._x, self._major,
self._minor, self._pa])
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profile_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def find_shell_fraction(self, array, frac_thresh=0.05, grad_thresh=1,
**kwargs):
'''
Find the fraction of the bubble edge associated with a shell.
'''
shell_frac = 0
for prof in self.profiles_lines(array, **kwargs):
pass
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
| <commit_before>
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return [self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
<commit_msg>Return as array; start of finding shell fraction<commit_after> |
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return np.array([self._y, self._x, self._major,
self._minor, self._pa])
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profile_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def find_shell_fraction(self, array, frac_thresh=0.05, grad_thresh=1,
**kwargs):
'''
Find the fraction of the bubble edge associated with a shell.
'''
shell_frac = 0
for prof in self.profiles_lines(array, **kwargs):
pass
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
|
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return [self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
Return as array; start of finding shell fraction
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return np.array([self._y, self._x, self._major,
self._minor, self._pa])
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profile_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def find_shell_fraction(self, array, frac_thresh=0.05, grad_thresh=1,
**kwargs):
'''
Find the fraction of the bubble edge associated with a shell.
'''
shell_frac = 0
for prof in self.profiles_lines(array, **kwargs):
pass
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
| <commit_before>
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return [self._y, self._x, self._major,
self._minor, self._pa]
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profiles_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
<commit_msg>Return as array; start of finding shell fraction<commit_after>
import numpy as np
class Bubble2D(object):
"""
Class for candidate bubble portions from 2D planes.
"""
def __init__(self, props):
super(Bubble2D, self).__init__()
self._y = props[0]
self._x = props[1]
self._major = props[2]
self._minor = props[3]
self._pa = props[4]
@property
def params(self):
return np.array([self._y, self._x, self._major,
self._minor, self._pa])
@property
def area(self):
return np.pi * self.major * self.minor
@property
def pa(self):
return self._pa
@property
def major(self):
return self._major
@property
def minor(self):
return self._minor
def profile_lines(self, array, **kwargs):
'''
Calculate radial profile lines of the 2D bubbles.
'''
from basics.profile import azimuthal_profiles
return azimuthal_profiles(array, self.params, **kwargs)
def find_shell_fraction(self, array, frac_thresh=0.05, grad_thresh=1,
**kwargs):
'''
Find the fraction of the bubble edge associated with a shell.
'''
shell_frac = 0
for prof in self.profiles_lines(array, **kwargs):
pass
def as_mask(self):
'''
Return a boolean mask of the 2D region.
'''
raise NotImplementedError()
def find_shape(self):
'''
Expand/contract to match the contours in the data.
'''
raise NotImplementedError()
|
bfa446d5fc399b685419ad00c376bcd9a13a8605 | mediacrush/decorators.py | mediacrush/decorators.py | from flask import jsonify, request
from functools import wraps
import json
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);" % (callback, jsonification.data) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
| from flask import jsonify, request
from functools import wraps
import json
jsonp_notice = """
// MediaCrush supports Cross Origin Resource Sharing requests.
// There is no reason to use JSONP; please use CORS instead.
// For more information, see https://mediacru.sh/docs/api"""
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);\n%s" % (callback, jsonification.data, jsonp_notice) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
| Add JSONP notice to API | Add JSONP notice to API | Python | mit | nerdzeu/NERDZCrush,MediaCrush/MediaCrush,roderickm/MediaCrush,roderickm/MediaCrush,nerdzeu/NERDZCrush,MediaCrush/MediaCrush,nerdzeu/NERDZCrush,roderickm/MediaCrush | from flask import jsonify, request
from functools import wraps
import json
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);" % (callback, jsonification.data) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
Add JSONP notice to API | from flask import jsonify, request
from functools import wraps
import json
jsonp_notice = """
// MediaCrush supports Cross Origin Resource Sharing requests.
// There is no reason to use JSONP; please use CORS instead.
// For more information, see https://mediacru.sh/docs/api"""
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);\n%s" % (callback, jsonification.data, jsonp_notice) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
| <commit_before>from flask import jsonify, request
from functools import wraps
import json
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);" % (callback, jsonification.data) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
<commit_msg>Add JSONP notice to API<commit_after> | from flask import jsonify, request
from functools import wraps
import json
jsonp_notice = """
// MediaCrush supports Cross Origin Resource Sharing requests.
// There is no reason to use JSONP; please use CORS instead.
// For more information, see https://mediacru.sh/docs/api"""
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);\n%s" % (callback, jsonification.data, jsonp_notice) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
| from flask import jsonify, request
from functools import wraps
import json
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);" % (callback, jsonification.data) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
Add JSONP notice to APIfrom flask import jsonify, request
from functools import wraps
import json
jsonp_notice = """
// MediaCrush supports Cross Origin Resource Sharing requests.
// There is no reason to use JSONP; please use CORS instead.
// For more information, see https://mediacru.sh/docs/api"""
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);\n%s" % (callback, jsonification.data, jsonp_notice) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
| <commit_before>from flask import jsonify, request
from functools import wraps
import json
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);" % (callback, jsonification.data) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
<commit_msg>Add JSONP notice to API<commit_after>from flask import jsonify, request
from functools import wraps
import json
jsonp_notice = """
// MediaCrush supports Cross Origin Resource Sharing requests.
// There is no reason to use JSONP; please use CORS instead.
// For more information, see https://mediacru.sh/docs/api"""
def json_output(f):
@wraps(f)
def wrapper(*args, **kwargs):
def jsonify_wrap(obj):
callback = request.args.get('callback', False)
jsonification = jsonify(obj)
if callback:
jsonification.data = "%s(%s);\n%s" % (callback, jsonification.data, jsonp_notice) # Alter the response
return jsonification
result = f(*args, **kwargs)
if isinstance(result, tuple):
return jsonify_wrap(result[0]), result[1]
return jsonify_wrap(result)
return wrapper
def cors(f):
@wraps(f)
def wrapper(*args, **kwargs):
res = f(*args, **kwargs)
if request.headers.get('x-cors-status', False):
if isinstance(res, tuple):
json_text = res[0].data
code = res[1]
else:
json_text = res.data
code = 200
o = json.loads(json_text)
o['x-status'] = code
return jsonify(o)
return res
return wrapper
|
a45f729d803d32cedd1b511c55c11ba53940c698 | server/lib/stl_tools.py | server/lib/stl_tools.py | import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
    """Run ADMesh on an uploaded STL file and return its bounding-box size.

    Parameters
    ----------
    path : str
        Base directory containing the configured STL upload directory.
    fileName : str
        Name of the STL file inside that directory.

    Returns
    -------
    dict
        Mapping of axis key ('x', 'y', 'z') to the model's extent along
        that axis, in the units reported by ADMesh.

    Raises
    ------
    IndexError
        If ADMesh's output lacks the expected Min/Max lines.
    """
    # NOTE(review): a single command string without shell=True — confirm
    # this launches correctly on the deployment platform.
    command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
        ADMeshExecutable=config['ADMesh-executable'],
        stlFilePath=os.path.join(path, config['stl-upload-directory'], fileName)
    ), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    output, err = command.communicate()
    output = output.decode(config['terminal-encoding'])
    dimensions = {}
    for axis in types:
        try:
            minVal = re.findall(dimensionsRegex.format(type=types[axis][0]), output)[0]
            maxVal = re.findall(dimensionsRegex.format(type=types[axis][1]), output)[0]
            # The extent along an axis is |max - min|.  The previous
            # |min| + |max| only agrees when the model straddles the
            # origin; otherwise it misreports the size.
            dimensions[axis] = abs(float(maxVal) - float(minVal))
        except IndexError as e:
            print('unable to decode', output)
            raise e
    return dimensions
| import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
    """Invoke ADMesh on an STL upload and return its per-axis extents.

    Returns a dict mapping each axis key from ``types`` to
    ``abs(min - max)`` parsed from ADMesh's textual report.  Raises
    IndexError (after printing the raw output) when the expected
    Min/Max lines are missing.
    """
    stl_file = os.path.join(path, config['stl-upload-directory'], fileName)
    process = subprocess.Popen(
        '{ADMeshExecutable} {stlFilePath}'.format(
            ADMeshExecutable=config['ADMesh-executable'],
            stlFilePath=stl_file),
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    raw, _err = process.communicate()
    text = raw.decode(config['terminal-encoding'])
    dimensions = {}
    for axis, (low_label, high_label) in types.items():
        try:
            low = re.findall(dimensionsRegex.format(type=low_label), text)[0]
            high = re.findall(dimensionsRegex.format(type=high_label), text)[0]
            dimensions[axis] = abs(float(low) - float(high))
        except IndexError as e:
            print('unable to decode', text)
            raise e
    return dimensions
| Fix calculation of model size | Fix calculation of model size
| Python | agpl-3.0 | MakersLab/custom-print | import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal)) + abs(float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
Fix calculation of model size | import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal) - float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
| <commit_before>import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal)) + abs(float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
<commit_msg>Fix calculation of model size<commit_after> | import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal) - float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
| import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal)) + abs(float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
Fix calculation of model sizeimport subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal) - float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
| <commit_before>import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal)) + abs(float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
<commit_msg>Fix calculation of model size<commit_after>import subprocess
import re
import os
from lib.utils import loadYaml
config = loadYaml('../config.yml')
dimensionsRegex = r'{type} = +(\-?\d+\.\d+)'
types = {
'x': ['Min X', 'Max X'],
'y': ['Min Y', 'Max Y'],
'z': ['Min Z', 'Max Z'],
}
def analyzeSTL(path, fileName):
command = subprocess.Popen('{ADMeshExecutable} {stlFilePath}'.format(
ADMeshExecutable=config['ADMesh-executable'],
stlFilePath = os.path.join(path, config['stl-upload-directory'], fileName)
), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output, err = command.communicate()
output = output.decode(config['terminal-encoding'])
dimensions = {}
for type in types:
try:
firstVal = re.findall(dimensionsRegex.format(type=types[type][0]), output)[0]
secondVal = re.findall(dimensionsRegex.format(type=types[type][1]), output)[0]
dimensions[type] = abs(float(firstVal) - float(secondVal))
except IndexError as e:
print('unable to decode', output)
raise e
return dimensions
|
b5db32721780c168cd7e2f5915dd4256fb9f9018 | board/pagination.py | board/pagination.py | from django.core.paginator import Paginator
class HCPaginator(Paginator):
    """Paginator exposing a window of page numbers around the current page."""

    # Number of page links to show on each side of the current page.
    adjacent_pages = 5

    def page(self, number):
        # Remember the requested page so page_range can center on it.
        # NOTE(review): `number` is stored as-is; the arithmetic below
        # expects an int — confirm callers do not pass raw querystrings.
        self.page_num = number
        return super().page(number)

    @property
    def page_range(self):
        """Return up to ``2 * adjacent_pages + 1`` page numbers centered
        on the current page, clamped to ``[1, num_pages]``."""
        start = max(1, self.page_num - self.adjacent_pages)
        end = min(self.num_pages, self.page_num + self.adjacent_pages)
        if (end - start) < (self.adjacent_pages * 2):
            # Window hit an edge: widen toward the opposite side.
            if start == 1:
                end += self.adjacent_pages - (self.page_num - start)
            if end == self.num_pages:
                start -= self.adjacent_pages - (end - self.page_num)
            # Re-clamp: the widening above could push past either edge
            # (few total pages), previously yielding page 0 / negative
            # pages or pages beyond num_pages.
            start = max(1, start)
            end = min(self.num_pages, end)
        return list(range(start, end + 1))
| from django.core.paginator import Paginator
class HCPaginator(Paginator):
    """Paginator exposing a window of page numbers around the current page."""

    # Number of page links to show on each side of the current page.
    adjacent_pages = 5

    def page(self, number):
        # Remember the requested page so page_range can center on it.
        # NOTE(review): stored before validation; the arithmetic below
        # expects an int — confirm callers do not pass raw querystrings.
        self.page_num = number
        return super().page(number)

    @property
    def page_range(self):
        # Window of up to 2 * adjacent_pages + 1 page numbers centered
        # on the current page, widened toward the far side whenever it
        # touches an edge of the pagination range.
        start = max(1, self.page_num - self.adjacent_pages)
        end = min(self.num_pages, self.page_num + self.adjacent_pages)
        if (end - start) < (self.adjacent_pages * 2):
            if start == 1:
                # Clipped on the left: extend to the right.
                end += self.adjacent_pages - (self.page_num - start)
            if end == self.num_pages:
                # Clipped on the right: extend to the left.
                start -= self.adjacent_pages - (end - self.page_num)
            # NOTE(review): the widening is not re-clamped, so with few
            # total pages the range can include page 0 / negatives or
            # pages past num_pages — confirm templates tolerate this.
        return range(start, end + 1)
| Return iterator instead of list in paginator | Return iterator instead of list in paginator
| Python | mit | devunt/hydrocarbon,devunt/hydrocarbon,devunt/hydrocarbon | from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return list(range(start, end + 1))
Return iterator instead of list in paginator | from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return range(start, end + 1)
| <commit_before>from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return list(range(start, end + 1))
<commit_msg>Return iterator instead of list in paginator<commit_after> | from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return range(start, end + 1)
| from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return list(range(start, end + 1))
Return iterator instead of list in paginatorfrom django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return range(start, end + 1)
| <commit_before>from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return list(range(start, end + 1))
<commit_msg>Return iterator instead of list in paginator<commit_after>from django.core.paginator import Paginator
class HCPaginator(Paginator):
adjacent_pages = 5
def page(self, number):
self.page_num = number
return super().page(number)
@property
def page_range(self):
start = max(1, self.page_num - self.adjacent_pages)
end = min(self.num_pages, self.page_num + self.adjacent_pages)
if (end - start) < (self.adjacent_pages * 2):
if start == 1:
end += self.adjacent_pages - (self.page_num - start)
if end == self.num_pages:
start -= self.adjacent_pages - (end - self.page_num)
return range(start, end + 1)
|
30dd3c8436ebe69aff2956322312072e8ab581f0 | example/tests/test_fields.py | example/tests/test_fields.py | # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.cart import BaseCartItem
class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Round-trip a JSON object through the model's JSONField.

        Saves a dict into the ``extra`` field and reloads the row to
        verify the value survives serialization unchanged.
        """
        json_obj = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}
        obj = BaseCartItem.objects.create(extra=json_obj)
        new_obj = BaseCartItem.objects.get(id=obj.id)
        # The row is created through the `extra` field above, so the
        # assertion must read the same field back (reading `.json` was
        # a bug: no such attribute is ever written).
        self.assertEqual(new_obj.extra, json_obj)
| # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
    """JSONField Wrapper Tests"""

    def test_json_field_create(self):
        """Test saving a JSON object in our JSONField"""
        # Arbitrary payload to round-trip through the database.
        payload = {
            "item_1": "this is a json blah",
            "blergh": "hey, hey, hey"}
        created = Customer.objects.create(extra=payload)
        # Reload from the database to exercise deserialization.
        reloaded = Customer.objects.get(id=created.id)
        self.assertEqual(reloaded.extra, payload)
| Update model used to test | Update model used to test
| Python | bsd-3-clause | jrief/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,divio/django-shop,khchine5/django-shop,nimbis/django-shop,jrief/django-shop,khchine5/django-shop,nimbis/django-shop,divio/django-shop,jrief/django-shop,nimbis/django-shop,nimbis/django-shop,awesto/django-shop,awesto/django-shop,jrief/django-shop,divio/django-shop | # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.cart import BaseCartItem
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = BaseCartItem.objects.create(extra=json_obj)
new_obj = BaseCartItem.objects.get(id=obj.id)
self.assertEqual(new_obj.json, json_obj)
Update model used to test | # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = Customer.objects.create(extra=json_obj)
new_obj = Customer.objects.get(id=obj.id)
self.assertEqual(new_obj.extra, json_obj)
| <commit_before># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.cart import BaseCartItem
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = BaseCartItem.objects.create(extra=json_obj)
new_obj = BaseCartItem.objects.get(id=obj.id)
self.assertEqual(new_obj.json, json_obj)
<commit_msg>Update model used to test<commit_after> | # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = Customer.objects.create(extra=json_obj)
new_obj = Customer.objects.get(id=obj.id)
self.assertEqual(new_obj.extra, json_obj)
| # -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.cart import BaseCartItem
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = BaseCartItem.objects.create(extra=json_obj)
new_obj = BaseCartItem.objects.get(id=obj.id)
self.assertEqual(new_obj.json, json_obj)
Update model used to test# -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = Customer.objects.create(extra=json_obj)
new_obj = Customer.objects.get(id=obj.id)
self.assertEqual(new_obj.extra, json_obj)
| <commit_before># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.cart import BaseCartItem
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = BaseCartItem.objects.create(extra=json_obj)
new_obj = BaseCartItem.objects.get(id=obj.id)
self.assertEqual(new_obj.json, json_obj)
<commit_msg>Update model used to test<commit_after># -*- coding: utf-8
from __future__ import unicode_literals
from django.test import TestCase
from shop.models.defaults.customer import Customer
class JSONFieldTest(TestCase):
"""JSONField Wrapper Tests"""
def test_json_field_create(self):
"""Test saving a JSON object in our JSONField"""
json_obj = {
"item_1": "this is a json blah",
"blergh": "hey, hey, hey"}
obj = Customer.objects.create(extra=json_obj)
new_obj = Customer.objects.get(id=obj.id)
self.assertEqual(new_obj.extra, json_obj)
|
b0a94dc2f696464db999e652b4a9dbdaf96f8532 | backend/talks/forms.py | backend/talks/forms.py | from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
    """Form used to propose a new talk for a conference.

    ``conference`` and ``language`` are resolved by their ``code``
    value instead of the primary key.
    """
    conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
    language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')

    def clean(self):
        cleaned_data = super().clean()
        # Field-level validation drops invalid/missing fields from
        # cleaned_data, so index access (`cleaned_data['conference']`)
        # raised KeyError and hid the real validation error — guard
        # with .get() and only check the CFP window when a valid
        # conference was supplied.
        conference = cleaned_data.get('conference')
        if conference and not conference.is_cfp_open:
            raise forms.ValidationError(_('The call for papers is not open!'))
        return cleaned_data

    def save(self, commit=True):
        # The proposing user comes from the request context, not the form.
        self.instance.owner = self.context.user
        return super().save(commit=commit)

    class Meta:
        model = Talk
        fields = ('title', 'abstract', 'topic', 'language', 'conference')
| from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
    """Form used to propose a new talk for a conference.

    ``conference`` and ``language`` are resolved by their ``code``
    value instead of the primary key, and both are mandatory.
    """
    conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
    language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)

    def clean(self):
        cleaned_data = super().clean()
        # Field validation drops invalid/missing entries from
        # cleaned_data, so use .get() and only check the CFP window
        # when a valid conference was actually supplied.
        conference = cleaned_data.get('conference')
        if conference and not conference.is_cfp_open:
            raise forms.ValidationError(_('The call for papers is not open!'))

    def save(self, commit=True):
        # Attach the requesting user as the talk owner before saving.
        # NOTE(review): assumes self.context carries the request user —
        # confirm against GrapheneModelForm.
        self.instance.owner = self.context.user
        return super().save(commit=commit)

    class Meta:
        model = Talk
        fields = ('title', 'abstract', 'topic', 'language', 'conference')
| Mark conference and language as required | Mark conference and language as required
| Python | mit | patrick91/pycon,patrick91/pycon | from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
Mark conference and language as required | from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
| <commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
<commit_msg>Mark conference and language as required<commit_after> | from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
| from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
Mark conference and language as requiredfrom django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
| <commit_before>from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code')
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code')
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data['conference']
if not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
<commit_msg>Mark conference and language as required<commit_after>from django import forms
from django.utils.translation import ugettext_lazy as _
from api.forms import GrapheneModelForm
from languages.models import Language
from conferences.models import Conference
from .models import Talk
class ProposeTalkForm(GrapheneModelForm):
conference = forms.ModelChoiceField(queryset=Conference.objects.all(), to_field_name='code', required=True)
language = forms.ModelChoiceField(queryset=Language.objects.all(), to_field_name='code', required=True)
def clean(self):
cleaned_data = super().clean()
conference = cleaned_data.get('conference')
if conference and not conference.is_cfp_open:
raise forms.ValidationError(_('The call for papers is not open!'))
def save(self, commit=True):
self.instance.owner = self.context.user
return super().save(commit=commit)
class Meta:
model = Talk
fields = ('title', 'abstract', 'topic', 'language', 'conference')
|
fb14ac15bcb1db14f756e8943f551ba428de4c7f | dusty/systems/known_hosts/__init__.py | dusty/systems/known_hosts/__init__.py | import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
| import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
if not os.path.exists(known_hosts_path):
open(known_hosts_path, 'a+').close()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
| Create required known_hosts file if it does not exists | Create required known_hosts file if it does not exists
| Python | mit | gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty | import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
Create required known_hosts file if it does not exists | import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
if not os.path.exists(known_hosts_path):
open(known_hosts_path, 'a+').close()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
| <commit_before>import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
<commit_msg>Create required known_hosts file if it does not exists<commit_after> | import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
if not os.path.exists(known_hosts_path):
open(known_hosts_path, 'a+').close()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
| import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
Create required known_hosts file if it does not existsimport os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
if not os.path.exists(known_hosts_path):
open(known_hosts_path, 'a+').close()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
| <commit_before>import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
<commit_msg>Create required known_hosts file if it does not exists<commit_after>import os
import logging
from ...subprocess import check_output
def _get_known_hosts_path():
ssh_dir = os.path.expanduser('~root/.ssh')
if not os.path.isdir(ssh_dir):
os.makedirs(ssh_dir)
return os.path.join(ssh_dir, 'known_hosts')
def ensure_known_hosts(hosts):
known_hosts_path = _get_known_hosts_path()
if not os.path.exists(known_hosts_path):
open(known_hosts_path, 'a+').close()
modified = False
with open(known_hosts_path, 'r+') as f:
contents = f.read()
if not contents.endswith('\n'):
contents += '\n'
for host in hosts:
if host not in contents:
logging.info('Adding {} ssh key to roots ssh known_hosts file'.format(host))
command = ['sh', '-c', 'ssh-keyscan -t rsa {}'.format(host)]
result = check_output(command, demote=False)
contents += result
modified = True
if modified:
f.seek(0)
f.write(contents)
|
4cb455890b7afa4f44da9d96a5c9820598731b36 | sedlex/AddGitHubIssueVisitor.py | sedlex/AddGitHubIssueVisitor.py | # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
| # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
| Fix missing githubIssue field when the corresponding issue already existed. | Fix missing githubIssue field when the corresponding issue already existed.
| Python | agpl-3.0 | Legilibre/SedLex | # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Fix missing githubIssue field when the corresponding issue already existed. | # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
| <commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Fix missing githubIssue field when the corresponding issue already existed.<commit_after> | # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
| # -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Fix missing githubIssue field when the corresponding issue already existed.# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
| <commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Fix missing githubIssue field when the corresponding issue already existed.<commit_after># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + '\nGitHub: #' + str(self.current_issue)
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
7503ebff7ded94a52ed5bb6e0a72935071576e20 | tests/test_default.py | tests/test_default.py | def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
| def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
def test_confluence_group(Group):
group = Group("confluence")
assert group.exists
| Add a testinfra group example | Add a testinfra group example
| Python | apache-2.0 | telstra-digital/ansible-role-confluence,telstra-digital/ansible-role-confluence | def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
Add a testinfra group example | def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
def test_confluence_group(Group):
group = Group("confluence")
assert group.exists
| <commit_before>def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
<commit_msg>Add a testinfra group example<commit_after> | def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
def test_confluence_group(Group):
group = Group("confluence")
assert group.exists
| def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
Add a testinfra group exampledef test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
def test_confluence_group(Group):
group = Group("confluence")
assert group.exists
| <commit_before>def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
<commit_msg>Add a testinfra group example<commit_after>def test_confluence_user(User):
user = User("confluence")
assert user.exists
assert user.group == "confluence"
def test_confluence_group(Group):
group = Group("confluence")
assert group.exists
|
843601dbd89d7ac99b128684bb19e9363e867b8a | tests/test_stumpff.py | tests/test_stumpff.py | from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose, assert_equal
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
| from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
| Use numpy.testing.assert_allclose with small relative tolerance | test: Use numpy.testing.assert_allclose with small relative tolerance
To avoid having numpy.testing.assert_equal fail in some cases at the
level of machine precision, use numpy.testing.assert_allclose instead
with a relative tolerance set small, 1e-10, to still be restrictive in
the level of equality accepted.
| Python | mit | poliastro/poliastro | from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose, assert_equal
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
test: Use numpy.testing.assert_allclose with small relative tolerance
To avoid having numpy.testing.assert_equal fail in some cases at the
level of machine precision, use numpy.testing.assert_allclose instead
with a relative tolerance set small, 1e-10, to still be restrictive in
the level of equality accepted. | from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
| <commit_before>from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose, assert_equal
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
<commit_msg>test: Use numpy.testing.assert_allclose with small relative tolerance
To avoid having numpy.testing.assert_equal fail in some cases at the
level of machine precision, use numpy.testing.assert_allclose instead
with a relative tolerance set small, 1e-10, to still be restrictive in
the level of equality accepted.<commit_after> | from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
| from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose, assert_equal
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
test: Use numpy.testing.assert_allclose with small relative tolerance
To avoid having numpy.testing.assert_equal fail in some cases at the
level of machine precision, use numpy.testing.assert_allclose instead
with a relative tolerance set small, 1e-10, to still be restrictive in
the level of equality accepted.from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
| <commit_before>from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose, assert_equal
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_equal(c2(psi), expected_c2)
assert_equal(c3(psi), expected_c3)
<commit_msg>test: Use numpy.testing.assert_allclose with small relative tolerance
To avoid having numpy.testing.assert_equal fail in some cases at the
level of machine precision, use numpy.testing.assert_allclose instead
with a relative tolerance set small, 1e-10, to still be restrictive in
the level of equality accepted.<commit_after>from numpy import cos, cosh, sin, sinh
from numpy.testing import assert_allclose
from poliastro._math.special import stumpff_c2 as c2, stumpff_c3 as c3
def test_stumpff_functions_near_zero():
psi = 0.5
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2)
assert_allclose(c3(psi), expected_c3)
def test_stumpff_functions_above_zero():
psi = 3.0
expected_c2 = (1 - cos(psi**0.5)) / psi
expected_c3 = (psi**0.5 - sin(psi**0.5)) / psi**1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
def test_stumpff_functions_under_zero():
psi = -3.0
expected_c2 = (cosh((-psi) ** 0.5) - 1) / (-psi)
expected_c3 = (sinh((-psi) ** 0.5) - (-psi) ** 0.5) / (-psi) ** 1.5
assert_allclose(c2(psi), expected_c2, rtol=1e-10)
assert_allclose(c3(psi), expected_c3, rtol=1e-10)
|
62f6e116306901aedaa738236075c4faa00db74d | tests/config_test.py | tests/config_test.py | #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
| Fix module path (config -> config_yaml) to unbreak test. | Fix module path (config -> config_yaml) to unbreak test.
| Python | apache-2.0 | mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher,mbrukman/cloud-launcher | #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
Fix module path (config -> config_yaml) to unbreak test. | #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix module path (config -> config_yaml) to unbreak test.<commit_after> | #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
Fix module path (config -> config_yaml) to unbreak test.#!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix module path (config -> config_yaml) to unbreak test.<commit_after>#!/usr/bin/python
#
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
################################################################################
#
# Tests handling of VM images (e.g., shortnames).
import glob
import json
import os
import unittest
# Local imports
import config_yaml
class ConfigExpanderTest(unittest.TestCase):
def testAllFiles(self):
in_yaml = glob.glob(os.path.join('testdata', '*.in.yaml'))
for input_file in in_yaml:
expected = input_file.replace('in.yaml', 'out.json')
with open(expected) as expected_in:
expected_json = json.loads(expected_in.read(), encoding='utf-8')
expander = config_yaml.ConfigExpander(project='dummy-project', zone='dummy-zone')
actual_json = expander.ExpandFile(input_file)
self.assertEqual(expected_json, actual_json)
if __name__ == '__main__':
unittest.main()
|
c47b2d88fce9f890e7356288faf097cf4a97f0b8 | simplesqlite/_logger/_logger.py | simplesqlite/_logger/_logger.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
| Add check for logging state | Add check for logging state
| Python | mit | thombashi/SimpleSQLite,thombashi/SimpleSQLite | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
Add check for logging state | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
<commit_msg>Add check for logging state<commit_after> | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
Add check for logging state# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
<commit_msg>Add check for logging state<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
import sqliteschema
import tabledata
from ._null_logger import NullLogger
MODULE_NAME = "simplesqlite"
_is_enable = False
try:
from loguru import logger
logger.disable(MODULE_NAME)
except ImportError:
logger = NullLogger()
def set_logger(is_enable):
global _is_enable
if is_enable == _is_enable:
return
if is_enable:
logger.enable(MODULE_NAME)
else:
logger.disable(MODULE_NAME)
tabledata.set_logger(is_enable)
sqliteschema.set_logger(is_enable)
try:
import pytablereader
pytablereader.set_logger(is_enable)
except ImportError:
pass
def set_log_level(log_level):
# deprecated
return
|
c784fb30beac7abe958867345161f74876ca940d | causalinfo/__init__.py | causalinfo/__init__.py | from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Attributes without boilerplate."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
| from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Information Measures on Causal Graphs."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
| Fix silly boiler plate copy issue. | Fix silly boiler plate copy issue.
| Python | mit | brettc/causalinfo | from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Attributes without boilerplate."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
Fix silly boiler plate copy issue. | from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Information Measures on Causal Graphs."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
| <commit_before>from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Attributes without boilerplate."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
<commit_msg>Fix silly boiler plate copy issue.<commit_after> | from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Information Measures on Causal Graphs."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
| from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Attributes without boilerplate."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
Fix silly boiler plate copy issue.from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Information Measures on Causal Graphs."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
| <commit_before>from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Attributes without boilerplate."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
<commit_msg>Fix silly boiler plate copy issue.<commit_after>from .probability import (
vs,
Variable,
make_variables,
UniformDist,
JointDist,
JointDistByState
)
from .network import CausalGraph, Equation
from .measure import MeasureCause, MeasureSuccess
from .payoff import PayoffMatrix
import equations
__version__ = "0.1.0"
__title__ = "causalinfo"
__description__ = "Information Measures on Causal Graphs."
__uri__ = "http://github/brettc/causalinfo/"
__author__ = "Brett Calcott"
__email__ = "brett.calcott@gmail.com"
__license__ = "MIT"
__copyright__ = "Copyright (c) 2015 Brett Calcott"
__all__ = [
"CausalGraph",
"Equation",
"vs",
"Variable",
"make_variables",
"UniformDist",
"JointDist",
"JointDistByState",
"MeasureCause",
"MeasureSuccess",
"PayoffMatrix",
"equations",
]
|
8c203bfb28e027e4fdd490096296f712c3afd28e | consulrest/keyvalue.py | consulrest/keyvalue.py | import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
| import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
elif r.status_code == 404:
return None
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list_keys(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
| Return None if key is not found | Return None if key is not found
| Python | mit | vcoque/consul-ri | import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
Return None if key is not found | import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
elif r.status_code == 404:
return None
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list_keys(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
| <commit_before>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
<commit_msg>Return None if key is not found<commit_after> | import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
elif r.status_code == 404:
return None
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list_keys(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
| import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
Return None if key is not foundimport json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
elif r.status_code == 404:
return None
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list_keys(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
| <commit_before>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
<commit_msg>Return None if key is not found<commit_after>import json
import re
import requests
class KeyValue(object):
def __init__(self, url):
self._url = "%s/kv" % url
def _get(self, key, recurse=None, keys=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
if keys is not None:
params['keys'] = True
r = requests.get(url, params=params)
if r.ok:
return json.loads(r.text)
elif r.status_code == 404:
return None
else:
r.raise_for_status()
def get(self, key, recurse=None):
return self._get(key, recurse=recurse)
def list_keys(self, key=''):
return self._get(key, keys=True)
def set(self, key, value, cas=None):
params = dict()
if cas is not None:
params['cas'] = cas
r = requests.put(self._url + '/' + key, data=value, params=params)
if r.ok:
if re.match(r"true", r.text) is not None:
return True
elif re.match(r"false", r.text) is not None:
return False
else:
r.raise_for_status()
def delete(self, key, recurse=None):
url = self._url + '/' + key
params = dict()
if recurse is not None:
params['recurse'] = True
r = requests.delete(url, params=params)
r.raise_for_status()
|
a013927ee9772e05ae4255cff98ecfe4819f205c | flask_app/__init__.py | flask_app/__init__.py | """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
| """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
| Set login view for login_required | Set login view for login_required
| Python | mit | szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft,szeestraten/kidsakoder-minecraft | """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
Set login view for login_required | """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
| <commit_before>"""
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
<commit_msg>Set login view for login_required<commit_after> | """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
| """
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
Set login view for login_required"""
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
| <commit_before>"""
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
<commit_msg>Set login view for login_required<commit_after>"""
The flask application package.
"""
from flask import Flask
from flask.ext import login
import models
app = Flask(__name__)
# Flask-Login initialization
login_manager = login.LoginManager()
login_manager.init_app(app)
login_manager.login_view = 'login'
@login_manager.user_loader
def load_user(user_id):
return models.User.get(user_id)
# See configuration.py for possible configuration objects
app.config.from_object('flask_app.configuration.Development')
import flask_app.database
import flask_app.views
|
9698e531ffd528b6b56e285f5cf8087aa06d4a02 | test/conftest.py | test/conftest.py | import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
| import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
| Add fixture for Namespace specifically. | Add fixture for Namespace specifically.
| Python | mit | mwchase/class-namespaces,mwchase/class-namespaces | import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
Add fixture for Namespace specifically. | import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
| <commit_before>import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
<commit_msg>Add fixture for Namespace specifically.<commit_after> | import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
| import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
Add fixture for Namespace specifically.import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
| <commit_before>import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
<commit_msg>Add fixture for Namespace specifically.<commit_after>import pytest
@pytest.fixture
def namespaces():
import class_namespaces
return class_namespaces
@pytest.fixture
def namespace(namespaces):
return namespaces.Namespace
@pytest.fixture
def compat():
import class_namespaces.compat
return class_namespaces.compat
@pytest.fixture
def abc():
import class_namespaces.compat.abc
return class_namespaces.compat.abc
|
a9fd1154c99d1377e0da5127762d6248f3c9f81f | github3/gists/file.py | github3/gists/file.py | # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
import requests
from ..models import GitHubObject
class GistFile(GitHubObject):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
r = requests.get(self.raw_url)
self.content = r.text
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
| # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
from ..models import GitHubCore
class GistFile(GitHubCore):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
self.content = self.get_content(attributes)
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
def get_content(self, attributes):
"""Retrieve contents of file."""
content = attributes.get('content')
if not content:
response = self._get(self.raw_url)
if self._boolean(response, 200, 404):
content = response.content
return content
| Load content from raw_url if empty | Load content from raw_url if empty
| Python | bsd-3-clause | balloob/github3.py,christophelec/github3.py,sigmavirus24/github3.py | # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
import requests
from ..models import GitHubObject
class GistFile(GitHubObject):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
r = requests.get(self.raw_url)
self.content = r.text
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
Load content from raw_url if empty | # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
from ..models import GitHubCore
class GistFile(GitHubCore):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
self.content = self.get_content(attributes)
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
def get_content(self, attributes):
"""Retrieve contents of file."""
content = attributes.get('content')
if not content:
response = self._get(self.raw_url)
if self._boolean(response, 200, 404):
content = response.content
return content
| <commit_before># -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
import requests
from ..models import GitHubObject
class GistFile(GitHubObject):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
r = requests.get(self.raw_url)
self.content = r.text
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
<commit_msg>Load content from raw_url if empty<commit_after> | # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
from ..models import GitHubCore
class GistFile(GitHubCore):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
self.content = self.get_content(attributes)
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
def get_content(self, attributes):
"""Retrieve contents of file."""
content = attributes.get('content')
if not content:
response = self._get(self.raw_url)
if self._boolean(response, 200, 404):
content = response.content
return content
| # -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
import requests
from ..models import GitHubObject
class GistFile(GitHubObject):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
r = requests.get(self.raw_url)
self.content = r.text
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
Load content from raw_url if empty# -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
from ..models import GitHubCore
class GistFile(GitHubCore):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
self.content = self.get_content(attributes)
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
def get_content(self, attributes):
"""Retrieve contents of file."""
content = attributes.get('content')
if not content:
response = self._get(self.raw_url)
if self._boolean(response, 200, 404):
content = response.content
return content
| <commit_before># -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
import requests
from ..models import GitHubObject
class GistFile(GitHubObject):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
r = requests.get(self.raw_url)
self.content = r.text
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
<commit_msg>Load content from raw_url if empty<commit_after># -*- coding: utf-8 -*-
"""
github3.gists.file
------------------
Module containing the logic for the GistFile object.
"""
from __future__ import unicode_literals
from ..models import GitHubCore
class GistFile(GitHubCore):
"""This represents the file object returned by interacting with gists.
It stores the raw url of the file, the file name, language, size and
content.
"""
def _update_attributes(self, attributes):
#: The raw URL for the file at GitHub.
self.raw_url = attributes.get('raw_url')
#: The name of the file.
self.filename = attributes.get('filename')
#: The name of the file.
self.name = attributes.get('filename')
#: The language associated with the file.
self.language = attributes.get('language')
#: The size of the file.
self.size = attributes.get('size')
#: The content of the file.
self.content = self.get_content(attributes)
def _repr(self):
return '<Gist File [{0}]>'.format(self.name)
def get_content(self, attributes):
"""Retrieve contents of file."""
content = attributes.get('content')
if not content:
response = self._get(self.raw_url)
if self._boolean(response, 200, 404):
content = response.content
return content
|
b26614d3f29824e9c5ec0663f09855074f754ddf | globus_sdk/version.py | globus_sdk/version.py | # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.0"
| # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.1"
| Update to v1.6.1 for release | Update to v1.6.1 for release
- Replace egg distribution format with wheels (#314)
- Internal maintenance
| Python | apache-2.0 | sirosen/globus-sdk-python,globusonline/globus-sdk-python,globus/globus-sdk-python,globus/globus-sdk-python | # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.0"
Update to v1.6.1 for release
- Replace egg distribution format with wheels (#314)
- Internal maintenance | # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.1"
| <commit_before># single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.0"
<commit_msg>Update to v1.6.1 for release
- Replace egg distribution format with wheels (#314)
- Internal maintenance<commit_after> | # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.1"
| # single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.0"
Update to v1.6.1 for release
- Replace egg distribution format with wheels (#314)
- Internal maintenance# single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.1"
| <commit_before># single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.0"
<commit_msg>Update to v1.6.1 for release
- Replace egg distribution format with wheels (#314)
- Internal maintenance<commit_after># single source of truth for package version,
# see https://packaging.python.org/en/latest/single_source_version/
__version__ = "1.6.1"
|
d6f13599f47ff4b4926d07d79962d3fff36ab6c4 | gradebook/__init__.py | gradebook/__init__.py | from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
| from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
csv_file = os.path.join(data_home, 'grades.csv')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
| Add placeholder for CSV file | Add placeholder for CSV file
| Python | bsd-2-clause | jarrodmillman/gradebook | from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
Add placeholder for CSV file | from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
csv_file = os.path.join(data_home, 'grades.csv')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
| <commit_before>from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
<commit_msg>Add placeholder for CSV file<commit_after> | from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
csv_file = os.path.join(data_home, 'grades.csv')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
| from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
Add placeholder for CSV filefrom collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
csv_file = os.path.join(data_home, 'grades.csv')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
| <commit_before>from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
<commit_msg>Add placeholder for CSV file<commit_after>from collections import OrderedDict
import os
import sys
import json
try:
gb_home = os.environ["GB_HOME"]
except KeyError:
raise RuntimeError("Please set the environment variable GB_HOME")
repo_dir = os.path.join(gb_home, 'repos')
data_dir = os.path.join(gb_home, 'data')
log_dir = os.path.join(gb_home, 'log')
grade_file = 'grades.json'
instructor_home = os.path.join(repo_dir, 'instructor', 'assignments')
student_grades = grade_file
class_grades = os.path.join(data_dir, grade_file)
config_file = os.path.join(data_dir, 'config.json')
csv_file = os.path.join(data_home, 'grades.csv')
class_log = os.path.join(log_dir, 'grade.log')
def get_grades(filename=class_grades):
try:
with open(filename) as infile:
grades = json.load(infile, object_pairs_hook=OrderedDict)
except:
print("Trouble loading " + filename)
sys.exit(1)
return grades
def save_grades(content, filename):
with open(filename, 'w') as outfile:
json.dump(content, outfile, indent=4)
grades = get_grades()
|
465a13cad35b62a0fc64768ec6b2aed0573566da | ubersmith/__init__.py | ubersmith/__init__.py |
__all__ = [
'api',
'calls',
'entities',
]
|
__all__ = [
'api',
'calls',
'entities',
'exceptions',
'utils',
]
| Add new modules to package init. | Add new modules to package init.
| Python | mit | jasonkeene/python-ubersmith,jasonkeene/python-ubersmith,hivelocity/python-ubersmith,hivelocity/python-ubersmith |
__all__ = [
'api',
'calls',
'entities',
]
Add new modules to package init. |
__all__ = [
'api',
'calls',
'entities',
'exceptions',
'utils',
]
| <commit_before>
__all__ = [
'api',
'calls',
'entities',
]
<commit_msg>Add new modules to package init.<commit_after> |
__all__ = [
'api',
'calls',
'entities',
'exceptions',
'utils',
]
|
__all__ = [
'api',
'calls',
'entities',
]
Add new modules to package init.
__all__ = [
'api',
'calls',
'entities',
'exceptions',
'utils',
]
| <commit_before>
__all__ = [
'api',
'calls',
'entities',
]
<commit_msg>Add new modules to package init.<commit_after>
__all__ = [
'api',
'calls',
'entities',
'exceptions',
'utils',
]
|
cf5fb07651099e38e6487eae641da07feda40b05 | numba/tests/test_api.py | numba/tests/test_api.py | import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
| import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
| Add testcases for jit and njit with forceobj and nopython | Add testcases for jit and njit with forceobj and nopython
| Python | bsd-2-clause | numba/numba,cpcloud/numba,seibert/numba,stuartarchibald/numba,sklam/numba,seibert/numba,stonebig/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,IntelLabs/numba,IntelLabs/numba,numba/numba,stonebig/numba,stonebig/numba,IntelLabs/numba,seibert/numba,stonebig/numba,gmarkall/numba,numba/numba,sklam/numba,IntelLabs/numba,stuartarchibald/numba,seibert/numba,seibert/numba,sklam/numba,stonebig/numba,sklam/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,stuartarchibald/numba,cpcloud/numba,gmarkall/numba,gmarkall/numba,gmarkall/numba,cpcloud/numba,gmarkall/numba,sklam/numba,numba/numba | import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
Add testcases for jit and njit with forceobj and nopython | import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
| <commit_before>import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
<commit_msg>Add testcases for jit and njit with forceobj and nopython<commit_after> | import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
| import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
Add testcases for jit and njit with forceobj and nopythonimport numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
| <commit_before>import numba
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
if __name__ == '__main__':
unittest.main()
<commit_msg>Add testcases for jit and njit with forceobj and nopython<commit_after>import numba
from numba import jit, njit
from numba.tests.support import TestCase
import unittest
class TestNumbaModule(TestCase):
"""
Test the APIs exposed by the top-level `numba` module.
"""
def check_member(self, name):
self.assertTrue(hasattr(numba, name), name)
self.assertIn(name, numba.__all__)
def test_numba_module(self):
# jit
self.check_member("jit")
self.check_member("vectorize")
self.check_member("guvectorize")
self.check_member("njit")
# errors
self.check_member("NumbaError")
self.check_member("TypingError")
# types
self.check_member("int32")
# misc
numba.__version__ # not in __all__
class TestJitDecorator(TestCase):
"""
Test the jit and njit decorators
"""
def test_jit_nopython_forceobj(self):
with self.assertRaises(ValueError):
jit(nopython=True, forceobj=True)
def py_func(x):
return(x)
jit_func = jit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = jit(forceobj=True)(py_func)
jit_func(1)
self.assertFalse(jit_func.nopython_signatures)
def test_njit_nopython_forceobj(self):
with self.assertWarns(RuntimeWarning):
njit(forceobj=True)
with self.assertWarns(RuntimeWarning):
njit(nopython=True)
def py_func(x):
return(x)
jit_func = njit(nopython=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
jit_func = njit(forceobj=True)(py_func)
jit_func(1)
self.assertTrue(jit_func.nopython_signatures)
if __name__ == '__main__':
unittest.main()
|
95245bb7fab6efe5a72cb8abbf4380a26b72a720 | corehq/apps/hqwebapp/middleware.py | corehq/apps/hqwebapp/middleware.py | import logging
from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from django.conf import settings
logger = logging.getLogger('')
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
logger.error(warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
| from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from corehq.util.soft_assert import soft_assert
from django.conf import settings
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
_assert = soft_assert(notify_admins=True, exponential_backoff=True)
_assert(False, warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
| Revert "log to file, don't email" | Revert "log to file, don't email"
This reverts commit a132890ef32c99b938021717b67c3e58c13952b0.
| Python | bsd-3-clause | qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | import logging
from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from django.conf import settings
logger = logging.getLogger('')
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
logger.error(warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
Revert "log to file, don't email"
This reverts commit a132890ef32c99b938021717b67c3e58c13952b0. | from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from corehq.util.soft_assert import soft_assert
from django.conf import settings
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
_assert = soft_assert(notify_admins=True, exponential_backoff=True)
_assert(False, warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
| <commit_before>import logging
from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from django.conf import settings
logger = logging.getLogger('')
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
logger.error(warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
<commit_msg>Revert "log to file, don't email"
This reverts commit a132890ef32c99b938021717b67c3e58c13952b0.<commit_after> | from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from corehq.util.soft_assert import soft_assert
from django.conf import settings
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
_assert = soft_assert(notify_admins=True, exponential_backoff=True)
_assert(False, warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
| import logging
from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from django.conf import settings
logger = logging.getLogger('')
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
logger.error(warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
Revert "log to file, don't email"
This reverts commit a132890ef32c99b938021717b67c3e58c13952b0.from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from corehq.util.soft_assert import soft_assert
from django.conf import settings
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
_assert = soft_assert(notify_admins=True, exponential_backoff=True)
_assert(False, warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
| <commit_before>import logging
from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from django.conf import settings
logger = logging.getLogger('')
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
logger.error(warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
<commit_msg>Revert "log to file, don't email"
This reverts commit a132890ef32c99b938021717b67c3e58c13952b0.<commit_after>from django.middleware.csrf import CsrfViewMiddleware, REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN
from corehq.util.soft_assert import soft_assert
from django.conf import settings
class HQCsrfViewMiddleWare(CsrfViewMiddleware):
def _reject(self, request, reason):
if settings.CSRF_ALWAYS_OFF and reason in [REASON_NO_CSRF_COOKIE, REASON_BAD_TOKEN]:
warning = "Request at {url} doesn't contain a csrf token. Letting the request pass through for now. "\
"Check if we are sending csrf_token in the corresponding POST form, if not fix it. "\
"Read more here https://github.com/dimagi/commcare-hq/pull/9227".format(
url=request.path
)
_assert = soft_assert(notify_admins=True, exponential_backoff=True)
_assert(False, warning)
return self._accept(request)
else:
return super(HQCsrfViewMiddleWare, self)._reject(request, reason)
|
ab4e279a6866d432cd1f58a07879e219360b4911 | src/tenyksscripts/scripts/8ball.py | src/tenyksscripts/scripts/8ball.py | import random
# Canonical Magic 8-Ball answers, in the classic die order.
ateball = [
    "It is certain.",
    "It is decidedly so.",
    "Without a doubt.",
    "Yes, definitely.",
    "You may rely on it.",
    "As I see it, yes.",
    "Most likely.",
    "Outlook good.",
    "Yes.",
    "Signs point to yes.",
    "Reply hazy, try again.",
    "Ask again later.",
    "Better not tell you now.",
    "Cannot predict now.",
    "Concentrate and ask again.",
    "Don't count on it.",
    "My reply is no.",
    "My sources say no.",
    "Outlook not so good.",
    "Very doubtful.",
]


def run(data, settings):
    """Reply with a random fortune addressed to the asker when the
    message mentions '8ball'; otherwise return None (no reply)."""
    if '8ball' not in data['payload']:
        return None
    fortune = random.choice(ateball)
    return '{nick}: {fortune}'.format(nick=data['nick'], fortune=fortune)
| import random
# Magic 8-Ball answers (punctuation-free variant for IRC output).
ateball = [
    "It is certain",
    "It is decidedly so",
    "Without a doubt",
    "Yes definitely",
    "You may rely on it",
    "As I see it yes",
    "Most likely",
    "Outlook good",
    "Yes",
    "Signs point to yes",
    "Reply hazy try again",
    "Ask again later",
    "Better not tell you now",
    "Cannot predict now",
    "Concentrate and ask again",
    "Don't count on it",
    "My reply is no",
    "My sources say no",
    "Outlook not so good",
    "Very doubtful",
]


def run(data, settings):
    """Return a random 8-ball fortune when the incoming message
    mentions '8ball'; otherwise return None so the bot stays quiet.

    :param data: message dict; only ``data['payload']`` is read here.
    :param settings: unused, kept for the script-plugin interface.
    """
    # Fix: dropped the redundant parentheses around the condition and
    # made the "no match" result an explicit ``return None``.
    if '8ball' in data['payload']:
        return random.choice(ateball)
    return None
| Revert "Added nickname and punct, removed parens" | Revert "Added nickname and punct, removed parens"
This reverts commit 061de1a57a95cd2911c06bb58c29a8e488b7387e.
| Python | mit | colby/tenyks-contrib,cblgh/tenyks-contrib,kyleterry/tenyks-contrib | import random
ateball = [
"It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful.",
]
def run(data, settings):
if '8ball' in data['payload']:
say = '{nick}: {fortune}'.format(nick=data['nick'],
fortune=random.choice(ateball))
return say
Revert "Added nickname and punct, removed parens"
This reverts commit 061de1a57a95cd2911c06bb58c29a8e488b7387e. | import random
ateball = [
"It is certain",
"It is decidedly so",
"Without a doubt",
"Yes definitely",
"You may rely on it",
"As I see it yes",
"Most likely",
"Outlook good",
"Yes",
"Signs point to yes",
"Reply hazy try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful",
]
def run(data, settings):
if ('8ball' in data['payload']):
return random.choice(ateball)
| <commit_before>import random
ateball = [
"It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful.",
]
def run(data, settings):
if '8ball' in data['payload']:
say = '{nick}: {fortune}'.format(nick=data['nick'],
fortune=random.choice(ateball))
return say
<commit_msg>Revert "Added nickname and punct, removed parens"
This reverts commit 061de1a57a95cd2911c06bb58c29a8e488b7387e.<commit_after> | import random
ateball = [
"It is certain",
"It is decidedly so",
"Without a doubt",
"Yes definitely",
"You may rely on it",
"As I see it yes",
"Most likely",
"Outlook good",
"Yes",
"Signs point to yes",
"Reply hazy try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful",
]
def run(data, settings):
if ('8ball' in data['payload']):
return random.choice(ateball)
| import random
ateball = [
"It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful.",
]
def run(data, settings):
if '8ball' in data['payload']:
say = '{nick}: {fortune}'.format(nick=data['nick'],
fortune=random.choice(ateball))
return say
Revert "Added nickname and punct, removed parens"
This reverts commit 061de1a57a95cd2911c06bb58c29a8e488b7387e.import random
ateball = [
"It is certain",
"It is decidedly so",
"Without a doubt",
"Yes definitely",
"You may rely on it",
"As I see it yes",
"Most likely",
"Outlook good",
"Yes",
"Signs point to yes",
"Reply hazy try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful",
]
def run(data, settings):
if ('8ball' in data['payload']):
return random.choice(ateball)
| <commit_before>import random
ateball = [
"It is certain.",
"It is decidedly so.",
"Without a doubt.",
"Yes, definitely.",
"You may rely on it.",
"As I see it, yes.",
"Most likely.",
"Outlook good.",
"Yes.",
"Signs point to yes.",
"Reply hazy, try again.",
"Ask again later.",
"Better not tell you now.",
"Cannot predict now.",
"Concentrate and ask again.",
"Don't count on it.",
"My reply is no.",
"My sources say no.",
"Outlook not so good.",
"Very doubtful.",
]
def run(data, settings):
if '8ball' in data['payload']:
say = '{nick}: {fortune}'.format(nick=data['nick'],
fortune=random.choice(ateball))
return say
<commit_msg>Revert "Added nickname and punct, removed parens"
This reverts commit 061de1a57a95cd2911c06bb58c29a8e488b7387e.<commit_after>import random
ateball = [
"It is certain",
"It is decidedly so",
"Without a doubt",
"Yes definitely",
"You may rely on it",
"As I see it yes",
"Most likely",
"Outlook good",
"Yes",
"Signs point to yes",
"Reply hazy try again",
"Ask again later",
"Better not tell you now",
"Cannot predict now",
"Concentrate and ask again",
"Don't count on it",
"My reply is no",
"My sources say no",
"Outlook not so good",
"Very doubtful",
]
def run(data, settings):
if ('8ball' in data['payload']):
return random.choice(ateball)
|
6cf3baed6e5f707e5c307388018f4bb3121327f9 | nanoservice/config.py | nanoservice/config.py | """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
    """Load service configuration from a json file or a json string.

    Exactly one of `filepath`/`filecontent` must be provided.  When
    `clients` is true, every `*.endpoint` value is wrapped in a
    `Client`; otherwise the raw endpoint strings are kept.

    Returns a plain dict of configuration items.
    Raises ConfigError when `service.endpoint` is missing.
    """
    assert filepath or filecontent
    if not filecontent:
        # Read the file ourselves and decode explicitly as UTF-8.
        with io.FileIO(filepath) as handle:
            filecontent = handle.read().decode('utf-8')
    parsed = json.loads(filecontent)
    if 'service.endpoint' not in parsed:
        raise ConfigError('Missing `service.endpoint` from config file')
    conf = {}
    for name, item in parsed.items():
        wrap = clients and name.endswith('.endpoint')
        conf[name] = Client(item) if wrap else item
    return conf
| """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
    """Dict whose items can also be read and written as attributes.

    A missing attribute raises AttributeError (not KeyError) so that
    ``hasattr()``, ``getattr(obj, name, default)`` and the copy/pickle
    protocols behave as they do for ordinary objects.
    """

    def __getattr__(self, key):
        # Only consulted after normal attribute lookup fails, so real
        # dict methods are unaffected.
        try:
            return self[key]
        except KeyError:
            # Fix: the original raised KeyError here, which broke
            # hasattr()/getattr() default handling.
            raise AttributeError(key)

    def __setattr__(self, key, value):
        self[key] = value
def load(filepath=None, filecontent=None, clients=True):
    """Load service configuration from a json file or a json string.

    Exactly one of `filepath`/`filecontent` must be provided.  When
    `clients` is true, every `*.endpoint` value is wrapped in a
    `Client`.  Returns a `DotDict`, so items are also reachable as
    attributes.  Raises ConfigError if `service.endpoint` is absent.
    """
    assert filepath or filecontent
    if not filecontent:
        # Read the file ourselves and decode explicitly as UTF-8.
        with io.FileIO(filepath) as handle:
            filecontent = handle.read().decode('utf-8')
    parsed = json.loads(filecontent)
    if 'service.endpoint' not in parsed:
        raise ConfigError('Missing `service.endpoint` from config file')
    conf = DotDict()
    for name, item in parsed.items():
        wrap = clients and name.endswith('.endpoint')
        conf[name] = Client(item) if wrap else item
    return conf
| Access the conf like a object | Access the conf like a object
| Python | mit | walkr/nanoservice | """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
Access the conf like a object | """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
| <commit_before>""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
<commit_msg>Access the conf like a object<commit_after> | """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
| """ Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
Access the conf like a object""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
| <commit_before>""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = {}
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
<commit_msg>Access the conf like a object<commit_after>""" Read configuration for a service from a json file """
import io
import json
from .client import Client
from .error import ConfigError
class DotDict(dict):
""" Access a dictionary like an object """
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
self[key] = value
def load(filepath=None, filecontent=None, clients=True):
""" Read the json file located at `filepath`
If `filecontent` is specified, its content will be json decoded
and loaded instead. The `clients` arg is a binary flag
which specifies whether the endpoints present in config (`filecontent`),
should be used to create `Client` objects.
Usage:
config.load(filepath=None, filecontent=None):
Provide either a filepath or a json string
"""
conf = DotDict()
# Read json configuration
assert filepath or filecontent
if not filecontent:
with io.FileIO(filepath) as fh:
filecontent = fh.read().decode('utf-8')
configs = json.loads(filecontent)
if 'service.endpoint' not in configs:
raise ConfigError('Missing `service.endpoint` from config file')
# Update the conf items (Create clients if necessary)
for key, value in configs.items():
conf[key] = value
if key.endswith('.endpoint') and clients:
conf[key] = Client(value)
return conf
|
1fb34b960f10d362fbc436c47fafc127be59584e | template_utils/templatetags/generic_markup.py | template_utils/templatetags/generic_markup.py | """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
    """Run text-to-HTML conversion over ``value``.

    ``arg``, when given, names the markup filter to apply; otherwise
    the default formatter configuration is used.
    """
    if arg is None:
        return formatter(value)
    return formatter(value, filter_name=arg)
def smartypants(value):
    """
    Applies SmartyPants to a piece of text, applying typographic
    niceties.

    Requires the Python SmartyPants library to be installed; see
    http://web.chad.org/projects/smartypants.py/

    If the library is missing the value is returned unchanged, except
    under DEBUG where a TemplateSyntaxError is raised to surface the
    misconfiguration.
    """
    try:
        from smartypants import smartyPants
    except ImportError:
        if settings.DEBUG:
            # Fix: the original raised ``template.TemplateSyntaxError``
            # but the name ``template`` was never imported, so this
            # branch crashed with a NameError instead.
            from django.template import TemplateSyntaxError
            raise TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
        return value
    else:
        return smartyPants(value)
# Register the filters with Django's template library.
register = Library()
register.filter(apply_markup)
| """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
# Register the filters with Django's template library.
register = Library()
register.filter(apply_markup)
register.filter(smartypants)
| Enable the SmartyPants filter; need to document it later | Enable the SmartyPants filter; need to document it later
git-svn-id: 4b29f3e8959dfd6aa2f99bd14fd314e33970d95d@74 d6b9e1ad-042d-0410-a639-15a354c1509c
| Python | bsd-3-clause | clones/django-template-utils | """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
Enable the SmartyPants filter; need to document it later
git-svn-id: 4b29f3e8959dfd6aa2f99bd14fd314e33970d95d@74 d6b9e1ad-042d-0410-a639-15a354c1509c | """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
register.filter(smartypants)
| <commit_before>"""
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
<commit_msg>Enable the SmartyPants filter; need to document it later
git-svn-id: 4b29f3e8959dfd6aa2f99bd14fd314e33970d95d@74 d6b9e1ad-042d-0410-a639-15a354c1509c<commit_after> | """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
register.filter(smartypants)
| """
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
Enable the SmartyPants filter; need to document it later
git-svn-id: 4b29f3e8959dfd6aa2f99bd14fd314e33970d95d@74 d6b9e1ad-042d-0410-a639-15a354c1509c"""
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
register.filter(smartypants)
| <commit_before>"""
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
<commit_msg>Enable the SmartyPants filter; need to document it later
git-svn-id: 4b29f3e8959dfd6aa2f99bd14fd314e33970d95d@74 d6b9e1ad-042d-0410-a639-15a354c1509c<commit_after>"""
Filters for converting plain text to HTML and enhancing the
typographic appeal of text on the Web.
"""
from django.conf import settings
from django.template import Library
from template_utils.markup import formatter
def apply_markup(value, arg=None):
"""
Applies text-to-HTML conversion.
Takes an optional argument to specify the name of a filter to use.
"""
if arg is not None:
return formatter(value, filter_name=arg)
return formatter(value)
def smartypants(value):
"""
Applies SmartyPants to a piece of text, applying typographic
niceties.
Requires the Python SmartyPants library to be installed; see
http://web.chad.org/projects/smartypants.py/
"""
try:
from smartypants import smartyPants
except ImportError:
if settings.DEBUG:
raise template.TemplateSyntaxError("Error in smartypants filter: the Python smartypants module is not installed or could not be imported")
return value
else:
return smartyPants(value)
register = Library()
register.filter(apply_markup)
register.filter(smartypants)
|
2b05a59b09e72f263761dae2feac360f5abd1f82 | promgen/__init__.py | promgen/__init__.py | default_app_config = 'promgen.apps.PromgenConfig'
# NOTE(review): the previous code called logging.basicConfig(level=DEBUG)
# here.  Configuring the root logger at import time is a defect in a
# library package: it overrides the host application's logging setup and
# forces DEBUG verbosity globally.  Logging configuration belongs to the
# application, so the call is removed.
import logging
| default_app_config = 'promgen.apps.PromgenConfig'
| Remove some debug logging config | Remove some debug logging config
| Python | mit | kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen | default_app_config = 'promgen.apps.PromgenConfig'
import logging
logging.basicConfig(level=logging.DEBUG)
Remove some debug logging config | default_app_config = 'promgen.apps.PromgenConfig'
| <commit_before>default_app_config = 'promgen.apps.PromgenConfig'
import logging
logging.basicConfig(level=logging.DEBUG)
<commit_msg>Remove some debug logging config<commit_after> | default_app_config = 'promgen.apps.PromgenConfig'
| default_app_config = 'promgen.apps.PromgenConfig'
import logging
logging.basicConfig(level=logging.DEBUG)
Remove some debug logging configdefault_app_config = 'promgen.apps.PromgenConfig'
| <commit_before>default_app_config = 'promgen.apps.PromgenConfig'
import logging
logging.basicConfig(level=logging.DEBUG)
<commit_msg>Remove some debug logging config<commit_after>default_app_config = 'promgen.apps.PromgenConfig'
|
30a836c9603ebb9289887a766e3c053a14c23c9f | archlinux/archpack_settings.py | archlinux/archpack_settings.py | #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| Update Arch package to 2.7 | Update Arch package to 2.7
| Python | bsd-2-clause | biicode/packages,bowlofstew/packages,bowlofstew/packages,biicode/packages | #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
Update Arch package to 2.7 | #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| <commit_before>#
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
<commit_msg>Update Arch package to 2.7<commit_after> | #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| #
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
Update Arch package to 2.7#
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
| <commit_before>#
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.6.1",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
<commit_msg>Update Arch package to 2.7<commit_after>#
# Biicode Arch Linux package settings.
#
# Check PKGBUILD_template docs for those settings and
# what they mean.
#
def settings():
return { "version": "2.7",
"release_number": "1",
"arch_deps": ["cmake>=3.0.2",
"zlib",
"glibc",
"sqlite",
"wget",
"python2-pmw"
],
"debian_deps": ["zlib1g",
"libc-bin",
"libsqlite3-0",
"wget",
"lib32z1",
"python-tk"
]
}
if __name__ == '__main__':
print(settings())
|
d4274336756ed6d6c36f94cbaae7e8328ac50f9a | djedi/auth/__init__.py | djedi/auth/__init__.py | def has_permission(request):
user = request.user
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
| import logging
_log = logging.getLogger(__name__)
def has_permission(request):
user = getattr(request, 'user', None)
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
else:
_log.warning("Request does not have `user` attribute. Make sure that "
"Djedi middleware is used after AuthenticationMiddleware")
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
| Handle wrong order of middleware. | Handle wrong order of middleware.
| Python | bsd-3-clause | andreif/djedi-cms,andreif/djedi-cms,andreif/djedi-cms,5monkeys/djedi-cms,5monkeys/djedi-cms,5monkeys/djedi-cms | def has_permission(request):
user = request.user
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
Handle wrong order of middleware. | import logging
_log = logging.getLogger(__name__)
def has_permission(request):
user = getattr(request, 'user', None)
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
else:
_log.warning("Request does not have `user` attribute. Make sure that "
"Djedi middleware is used after AuthenticationMiddleware")
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
| <commit_before>def has_permission(request):
user = request.user
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
<commit_msg>Handle wrong order of middleware.<commit_after> | import logging
_log = logging.getLogger(__name__)
def has_permission(request):
user = getattr(request, 'user', None)
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
else:
_log.warning("Request does not have `user` attribute. Make sure that "
"Djedi middleware is used after AuthenticationMiddleware")
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
| def has_permission(request):
user = request.user
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
Handle wrong order of middleware.import logging
_log = logging.getLogger(__name__)
def has_permission(request):
user = getattr(request, 'user', None)
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
else:
_log.warning("Request does not have `user` attribute. Make sure that "
"Djedi middleware is used after AuthenticationMiddleware")
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
| <commit_before>def has_permission(request):
user = request.user
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
<commit_msg>Handle wrong order of middleware.<commit_after>import logging
_log = logging.getLogger(__name__)
def has_permission(request):
user = getattr(request, 'user', None)
if user:
if user.is_superuser:
return True
if user.is_staff and user.groups.filter(name__iexact='djedi').exists():
return True
else:
_log.warning("Request does not have `user` attribute. Make sure that "
"Djedi middleware is used after AuthenticationMiddleware")
return False
def get_username(request):
user = request.user
if hasattr(user, 'get_username'):
return user.get_username()
else:
return user.username
|
2dfc3817881d9e90456dc3ea94b1fd0ec308fb5e | beavy/common/morphing_field.py | beavy/common/morphing_field.py | from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity and obj.__mapper__.polymorphic_identity.__class__.__name__
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
| from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
| Fix MorphingField: polymorphic_identy is already the string we want | Fix MorphingField: polymorphic_identy is already the string we want
| Python | mpl-2.0 | beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy | from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity and obj.__mapper__.polymorphic_identity.__class__.__name__
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
Fix MorphingField: polymorphic_identy is already the string we want | from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
| <commit_before>from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity and obj.__mapper__.polymorphic_identity.__class__.__name__
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
<commit_msg>Fix MorphingField: polymorphic_identy is already the string we want<commit_after> | from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
| from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity and obj.__mapper__.polymorphic_identity.__class__.__name__
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
Fix MorphingField: polymorphic_identy is already the string we wantfrom marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
| <commit_before>from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity and obj.__mapper__.polymorphic_identity.__class__.__name__
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
<commit_msg>Fix MorphingField: polymorphic_identy is already the string we want<commit_after>from marshmallow.fields import Field
class MorphingField(Field):
# registry = {
# }
def __init__(self, many=False, fallback=None, overwrite=None, **metadata):
self.many = False
self.fallback = fallback or self.FALLBACK
self.overwrite = overwrite
# Common alternative:
# def _obj_to_name(self, obj):
# return obj.__class__.__name__
def _obj_to_name(self, obj):
return obj.__mapper__.polymorphic_identity
def _serialize(self, value, attr, obj):
if value is None:
return None
if self.many:
return [self._get_serializer(value).dump(x).data for x in value]
return self._get_serializer(value).dump(value).data
def _get_serializer(self, obj):
name = self._obj_to_name(obj)
if self.overwrite:
kls = self.overwrite(obj, name)
if isinstance(kls, str):
name = kls
elif callable(kls):
return kls()
return self.registry.get(name, self.fallback)()
|
56b469eb2836d1fb6c2a7702b4693978512ecb51 | common/migrations/admin_unit_codes.py | common/migrations/admin_unit_codes.py | # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 1000 start 1000 minvalue 1000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
| # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 10000 start 10000 minvalue 10000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
| Fix wards code sequences restart | Fix wards code sequences restart
| Python | mit | MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api | # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 1000 start 1000 minvalue 1000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
Fix wards code sequences restart | # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 10000 start 10000 minvalue 10000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
| <commit_before># -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 1000 start 1000 minvalue 1000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
<commit_msg>Fix wards code sequences restart<commit_after> | # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 10000 start 10000 minvalue 10000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
| # -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 1000 start 1000 minvalue 1000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
Fix wards code sequences restart# -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 10000 start 10000 minvalue 10000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
| <commit_before># -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 1000 start 1000 minvalue 1000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
<commit_msg>Fix wards code sequences restart<commit_after># -*- coding: utf-8 -*-
from django.db import models, migrations
from facilities.models import Facility
def set_min_code_value(apps, schema_editor):
from django.db import connection
cursor = connection.cursor()
sql = """
ALTER SEQUENCE common_constituency_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_county_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_subcounty_code_seq restart 1000 start 1000 minvalue 1000;
ALTER SEQUENCE common_ward_code_seq restart 10000 start 10000 minvalue 10000;
"""
cursor = cursor.execute(sql)
class Migration(migrations.Migration):
dependencies = [
('common', '0001_initial'),
]
operations = [
migrations.RunPython(set_min_code_value),
]
|
287c2da6d72155a4988665ac3c4031032dd835e3 | admin_tests/common_auth/test_logs.py | admin_tests/common_auth/test_logs.py | from nose import tools as nt
from tests.base import AdminTestCase
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
update_admin_log('123', 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, 123)
| from nose import tools as nt
from tests.base import AdminTestCase
from osf_tests.factories import UserFactory
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
user = UserFactory()
update_admin_log(user.id, 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, user.id)
| Fix log test to use real user and id | Fix log test to use real user and id
| Python | apache-2.0 | sloria/osf.io,cwisecarver/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,laurenrevere/osf.io,aaxelb/osf.io,TomBaxter/osf.io,binoculars/osf.io,chrisseto/osf.io,cslzchen/osf.io,adlius/osf.io,baylee-d/osf.io,caneruguz/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,leb2dg/osf.io,icereval/osf.io,chrisseto/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,adlius/osf.io,mattclark/osf.io,leb2dg/osf.io,icereval/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,laurenrevere/osf.io,felliott/osf.io,mfraezz/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,icereval/osf.io,crcresearch/osf.io,aaxelb/osf.io,TomBaxter/osf.io,chrisseto/osf.io,chennan47/osf.io,mfraezz/osf.io,chennan47/osf.io,cslzchen/osf.io,caneruguz/osf.io,crcresearch/osf.io,sloria/osf.io,caseyrollins/osf.io,felliott/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,binoculars/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,erinspace/osf.io,laurenrevere/osf.io,baylee-d/osf.io,mfraezz/osf.io,chrisseto/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,adlius/osf.io,chennan47/osf.io,baylee-d/osf.io,Nesiehr/osf.io,cslzchen/osf.io,pattisdr/osf.io,caneruguz/osf.io,erinspace/osf.io,erinspace/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,hmoco/osf.io,aaxelb/osf.io,cwisecarver/osf.io,felliott/osf.io,adlius/osf.io,crcresearch/osf.io,leb2dg/osf.io,Nesiehr/osf.io,mattclark/osf.io,hmoco/osf.io,Nesiehr/osf.io,leb2dg/osf.io,Nesiehr/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,sloria/osf.io,aaxelb/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,binoculars/osf.io | from nose import tools as nt
from tests.base import AdminTestCase
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
update_admin_log('123', 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, 123)
Fix log test to use real user and id | from nose import tools as nt
from tests.base import AdminTestCase
from osf_tests.factories import UserFactory
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
user = UserFactory()
update_admin_log(user.id, 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, user.id)
| <commit_before>from nose import tools as nt
from tests.base import AdminTestCase
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
update_admin_log('123', 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, 123)
<commit_msg>Fix log test to use real user and id<commit_after> | from nose import tools as nt
from tests.base import AdminTestCase
from osf_tests.factories import UserFactory
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
user = UserFactory()
update_admin_log(user.id, 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, user.id)
| from nose import tools as nt
from tests.base import AdminTestCase
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
update_admin_log('123', 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, 123)
Fix log test to use real user and idfrom nose import tools as nt
from tests.base import AdminTestCase
from osf_tests.factories import UserFactory
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
user = UserFactory()
update_admin_log(user.id, 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, user.id)
| <commit_before>from nose import tools as nt
from tests.base import AdminTestCase
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
update_admin_log('123', 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, 123)
<commit_msg>Fix log test to use real user and id<commit_after>from nose import tools as nt
from tests.base import AdminTestCase
from osf_tests.factories import UserFactory
from osf.models.admin_log_entry import AdminLogEntry, update_admin_log
class TestUpdateAdminLog(AdminTestCase):
def test_add_log(self):
user = UserFactory()
update_admin_log(user.id, 'dfqc2', 'This', 'log_added')
nt.assert_equal(AdminLogEntry.objects.count(), 1)
log = AdminLogEntry.objects.latest('action_time')
nt.assert_equal(log.user_id, user.id)
|
07fabcc0fa08d95ec5f17f5cbfcd0c14b645f31c | child_compassion/migrations/11.0.1.0.0/post-migration.py | child_compassion/migrations/11.0.1.0.0/post-migration.py | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
| ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
# Add sponsorship group to everyone
sponsorship_group = env.ref('child_compassion.group_sponsorship')
env['res.users'].search([
('internal', '=', True),
('email', 'ilike', 'compassion.ch')
]).write({
'groups_id': [(4, sponsorship_group.id)]
})
# Add admin groups
sponsorship_manager_group = env.ref('child_compassion.group_manager')
gmc_manager_group = env.ref('message_center_compassion.group_gmc_manager')
env['res.users'].search([
('login', 'in', ['ecino', 'dwulliamoz', 'seicher', 'admin']),
]).write({
'groups_id': [(4, sponsorship_manager_group.id), (4, gmc_manager_group.id)]
})
| Add migration for assigning security groups | Add migration for assigning security groups
| Python | agpl-3.0 | CompassionCH/compassion-modules,CompassionCH/compassion-modules,CompassionCH/compassion-modules,CompassionCH/compassion-modules | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
Add migration for assigning security groups | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
# Add sponsorship group to everyone
sponsorship_group = env.ref('child_compassion.group_sponsorship')
env['res.users'].search([
('internal', '=', True),
('email', 'ilike', 'compassion.ch')
]).write({
'groups_id': [(4, sponsorship_group.id)]
})
# Add admin groups
sponsorship_manager_group = env.ref('child_compassion.group_manager')
gmc_manager_group = env.ref('message_center_compassion.group_gmc_manager')
env['res.users'].search([
('login', 'in', ['ecino', 'dwulliamoz', 'seicher', 'admin']),
]).write({
'groups_id': [(4, sponsorship_manager_group.id), (4, gmc_manager_group.id)]
})
| <commit_before>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
<commit_msg>Add migration for assigning security groups<commit_after> | ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
# Add sponsorship group to everyone
sponsorship_group = env.ref('child_compassion.group_sponsorship')
env['res.users'].search([
('internal', '=', True),
('email', 'ilike', 'compassion.ch')
]).write({
'groups_id': [(4, sponsorship_group.id)]
})
# Add admin groups
sponsorship_manager_group = env.ref('child_compassion.group_manager')
gmc_manager_group = env.ref('message_center_compassion.group_gmc_manager')
env['res.users'].search([
('login', 'in', ['ecino', 'dwulliamoz', 'seicher', 'admin']),
]).write({
'groups_id': [(4, sponsorship_manager_group.id), (4, gmc_manager_group.id)]
})
| ##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
Add migration for assigning security groups##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
# Add sponsorship group to everyone
sponsorship_group = env.ref('child_compassion.group_sponsorship')
env['res.users'].search([
('internal', '=', True),
('email', 'ilike', 'compassion.ch')
]).write({
'groups_id': [(4, sponsorship_group.id)]
})
# Add admin groups
sponsorship_manager_group = env.ref('child_compassion.group_manager')
gmc_manager_group = env.ref('message_center_compassion.group_gmc_manager')
env['res.users'].search([
('login', 'in', ['ecino', 'dwulliamoz', 'seicher', 'admin']),
]).write({
'groups_id': [(4, sponsorship_manager_group.id), (4, gmc_manager_group.id)]
})
| <commit_before>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
<commit_msg>Add migration for assigning security groups<commit_after>##############################################################################
#
# Copyright (C) 2018 Compassion CH (http://www.compassion.ch)
# @author: Nathan Flückiger <nathan.fluckiger@hotmail.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from openupgradelib import openupgrade
from odoo.addons.child_compassion import load_mappings
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
load_mappings(env.cr, env)
# Add sponsorship group to everyone
sponsorship_group = env.ref('child_compassion.group_sponsorship')
env['res.users'].search([
('internal', '=', True),
('email', 'ilike', 'compassion.ch')
]).write({
'groups_id': [(4, sponsorship_group.id)]
})
# Add admin groups
sponsorship_manager_group = env.ref('child_compassion.group_manager')
gmc_manager_group = env.ref('message_center_compassion.group_gmc_manager')
env['res.users'].search([
('login', 'in', ['ecino', 'dwulliamoz', 'seicher', 'admin']),
]).write({
'groups_id': [(4, sponsorship_manager_group.id), (4, gmc_manager_group.id)]
})
|
ffd8bb1e85fe7ed80d85062e4d5932f28065b84c | auditlog/apps.py | auditlog/apps.py | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
| from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| Apply default_auto_field to app config. | Apply default_auto_field to app config.
| Python | mit | jjkester/django-auditlog | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
Apply default_auto_field to app config. | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| <commit_before>from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
<commit_msg>Apply default_auto_field to app config.<commit_after> | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
Apply default_auto_field to app config.from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| <commit_before>from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
<commit_msg>Apply default_auto_field to app config.<commit_after>from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
|
f6072411bf097ae3d493f5c95d05f4711fdc5195 | Discord/cogs/python.py | Discord/cogs/python.py |
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
|
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
if resp.status == 404:
return await ctx.embed_reply(f"{ctx.bot.error_emoji} Package not found")
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
| Handle package not found in pypi command | [Discord] Handle package not found in pypi command
| Python | mit | Harmon758/Harmonbot,Harmon758/Harmonbot |
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
[Discord] Handle package not found in pypi command |
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
if resp.status == 404:
return await ctx.embed_reply(f"{ctx.bot.error_emoji} Package not found")
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
| <commit_before>
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
<commit_msg>[Discord] Handle package not found in pypi command<commit_after> |
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
if resp.status == 404:
return await ctx.embed_reply(f"{ctx.bot.error_emoji} Package not found")
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
|
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
[Discord] Handle package not found in pypi command
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
if resp.status == 404:
return await ctx.embed_reply(f"{ctx.bot.error_emoji} Package not found")
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
| <commit_before>
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
<commit_msg>[Discord] Handle package not found in pypi command<commit_after>
from discord.ext import commands
from utilities import checks
def setup(bot):
bot.add_cog(Python())
class Python(commands.Cog):
async def cog_check(self, ctx):
return await checks.not_forbidden().predicate(ctx)
@commands.command()
async def pep(self, ctx, number: int):
'''Generate Python Enhancement Proposal URL'''
await ctx.embed_reply(f"https://www.python.org/dev/peps/pep-{number:04}/")
@commands.command()
async def pypi(self, ctx, package: str):
'''Information about a package on PyPI'''
url = f"https://pypi.python.org/pypi/{package}/json"
async with ctx.bot.aiohttp_session.get(url) as resp:
if resp.status == 404:
return await ctx.embed_reply(f"{ctx.bot.error_emoji} Package not found")
data = await resp.json()
await ctx.embed_reply(title = data["info"]["name"], title_url = data["info"]["package_url"],
description = data["info"]["summary"], fields = (("Version", data["info"]["version"]),))
|
2032de8bcf6ae6ed84a09ce3e294bae8fd86962a | dog/core/ext/health.py | dog/core/ext/health.py | """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((msg.created_at - datetime.datetime.utcnow()).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
| """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((datetime.datetime.utcnow() - msg.created_at).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
| Fix gateway lag being negative in d?ping | Fix gateway lag being negative in d?ping
| Python | mit | sliceofcode/dogbot,slice/dogbot,slice/dogbot,slice/dogbot,sliceofcode/dogbot | """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((msg.created_at - datetime.datetime.utcnow()).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
Fix gateway lag being negative in d?ping | """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((datetime.datetime.utcnow() - msg.created_at).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
| <commit_before>"""
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((msg.created_at - datetime.datetime.utcnow()).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
<commit_msg>Fix gateway lag being negative in d?ping<commit_after> | """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((datetime.datetime.utcnow() - msg.created_at).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
| """
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((msg.created_at - datetime.datetime.utcnow()).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
Fix gateway lag being negative in d?ping"""
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((datetime.datetime.utcnow() - msg.created_at).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
| <commit_before>"""
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((msg.created_at - datetime.datetime.utcnow()).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
<commit_msg>Fix gateway lag being negative in d?ping<commit_after>"""
Commands used to check the health of the bot.
"""
import datetime
from time import monotonic
from dog import Cog
from discord.ext import commands
class Health(Cog):
@commands.command()
async def ping(self, ctx):
""" Pong! """
# measure gateway delay
before = monotonic()
msg = await ctx.send('\u200b')
after = monotonic()
pong_ws = round(ctx.bot.latency * 1000, 2)
pong_rest = round((after - before) * 1000, 2)
pong_gateway_lag = round((datetime.datetime.utcnow() - msg.created_at).total_seconds() * 1000, 2)
pong = f'Pong! WS: {pong_ws}ms, REST: {pong_rest}ms, GW lag: {pong_gateway_lag}ms'
await msg.edit(content=pong)
def setup(bot):
bot.add_cog(Health(bot))
|
a0585269f05189fb9ae4f5abe98cd36731ad8a53 | babel_util/scripts/json_to_pajek.py | babel_util/scripts/json_to_pajek.py | #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
| #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
    parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma separated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
                if subjects and not subjects.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
| Support for multiple subjects and filtering out non-wos ids | Support for multiple subjects and filtering out non-wos ids
| Python | agpl-3.0 | jevinw/rec_utilities,jevinw/rec_utilities | #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
Support for multiple subjects and filtering out non-wos ids | #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma seperated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subject.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
| <commit_before>#!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
<commit_msg>Support for multiple subjects and filtering out non-wos ids<commit_after> | #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma seperated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subject.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
| #!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
Support for multiple subjects and filtering out non-wos ids#!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma seperated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subject.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
| <commit_before>#!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this.")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.subject and arguments.subject not in entry["subject"]:
continue
for citation in entry["citations"]:
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
<commit_msg>Support for multiple subjects and filtering out non-wos ids<commit_after>#!/usr/bin/env python3
from util.misc import open_file, Benchmark
from util.PajekFactory import PajekFactory
import ujson
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description="Creates Pajek (.net) files from JSON")
parser.add_argument('outfile')
parser.add_argument('--temp-dir', help="Directory to store temporary files in", default=None)
parser.add_argument('--subject', '-s', help="For WoS, subject must include this. Can be a comma seperated list.")
parser.add_argument('--wos-only', help="For WoS, exclude any citations or ids that contain a dot (.)", action="store_true")
parser.add_argument('infile', nargs='+')
arguments = parser.parse_args()
b = Benchmark()
pjk = PajekFactory(temp_dir=arguments.temp_dir)
subjects = None
if arguments.subject:
subjects = set(arguments.subject.split(","))
for filename in arguments.infile:
with open_file(filename) as f:
for line in f:
entry = ujson.loads(line)
b.increment()
if arguments.wos_only and '.' in entry["id"]:
continue
if subjects and not subject.intersection(entry["subject"]):
continue
for citation in entry["citations"]:
if arguments.wos_only and '.' in citation:
continue
pjk.add_edge(entry["id"], citation)
b.print_freq()
with open_file(arguments.outfile, "w") as f:
pjk.write(f)
|
5f001d818459a2bd5e9f6a89e8ed097d379a26d2 | runtime/__init__.py | runtime/__init__.py | import builtins
import operator
import functools
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': __import__
, 'foldl': functools.reduce
, '~:': functools.partial
})
| import builtins
import operator
import functools
import importlib
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
| Use importlib.import_module instead of __import__. | Use importlib.import_module instead of __import__.
| Python | mit | pyos/dg | import builtins
import operator
import functools
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': __import__
, 'foldl': functools.reduce
, '~:': functools.partial
})
Use importlib.import_module instead of __import__. | import builtins
import operator
import functools
import importlib
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
| <commit_before>import builtins
import operator
import functools
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': __import__
, 'foldl': functools.reduce
, '~:': functools.partial
})
<commit_msg>Use importlib.import_module instead of __import__.<commit_after> | import builtins
import operator
import functools
import importlib
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
| import builtins
import operator
import functools
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': __import__
, 'foldl': functools.reduce
, '~:': functools.partial
})
Use importlib.import_module instead of __import__.import builtins
import operator
import functools
import importlib
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
| <commit_before>import builtins
import operator
import functools
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': __import__
, 'foldl': functools.reduce
, '~:': functools.partial
})
<commit_msg>Use importlib.import_module instead of __import__.<commit_after>import builtins
import operator
import functools
import importlib
from ..compile import varary
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
|
9a0ababb3b8b4b23184ab7005d995c17edef2a2b | src/test/test_imagesaveblock.py | src/test/test_imagesaveblock.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
def tearDown(self):
os.remove("test_saved.png")
if __name__ == '__main__':
unittest.main()
| Remove saved file in TestImageSaveBlock test case | Remove saved file in TestImageSaveBlock test case
| Python | lgpl-2.1 | anton-golubkov/Garland,anton-golubkov/Garland | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
if __name__ == '__main__':
unittest.main()
Remove saved file in TestImageSaveBlock test case | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
def tearDown(self):
os.remove("test_saved.png")
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove saved file in TestImageSaveBlock test case<commit_after> | #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
def tearDown(self):
os.remove("test_saved.png")
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
if __name__ == '__main__':
unittest.main()
Remove saved file in TestImageSaveBlock test case#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
def tearDown(self):
os.remove("test_saved.png")
if __name__ == '__main__':
unittest.main()
| <commit_before>#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
if __name__ == '__main__':
unittest.main()
<commit_msg>Remove saved file in TestImageSaveBlock test case<commit_after>#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
import cv
import os, sys
cmd_folder, f = os.path.split(os.path.dirname(os.path.abspath(__file__)))
if cmd_folder not in sys.path:
sys.path.insert(0, cmd_folder)
import ipf.ipfblock.imagesave
class TestImageSaveBlock(unittest.TestCase):
def setUp(self):
self.block = ipf.ipfblock.imagesave.ImageSave()
self.block.properties["file_name"].value = "test_saved.png"
def test_save_image(self):
""" Test save image to file
"""
image = cv.LoadImage("test.png")
self.block.input_ports["input_image"].pass_value(image)
self.block.process()
saved_image = cv.LoadImage("test_saved.png")
self.assertEqual(saved_image.tostring(), image.tostring())
def tearDown(self):
os.remove("test_saved.png")
if __name__ == '__main__':
unittest.main()
|
67a20401caaa63852e95fcaf8bafb6ed85ecd1f2 | test/selenium/src/lib/page/modal/__init__.py | test/selenium/src/lib/page/modal/__init__.py | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object # flake8: noqa
)
| # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object, # flake8: noqa
update_object # flake8: noqa
)
| Add a sort of hierarchy of modules for package | Add a sort of hierarchy of modules for package
| Python | apache-2.0 | AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object # flake8: noqa
)
Add a sort of hierarchy of modules for package | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object, # flake8: noqa
update_object # flake8: noqa
)
| <commit_before># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object # flake8: noqa
)
<commit_msg>Add a sort of hierarchy of modules for package<commit_after> | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object, # flake8: noqa
update_object # flake8: noqa
)
| # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object # flake8: noqa
)
Add a sort of hierarchy of modules for package# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object, # flake8: noqa
update_object # flake8: noqa
)
| <commit_before># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object # flake8: noqa
)
<commit_msg>Add a sort of hierarchy of modules for package<commit_after># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
from lib.page.modal import (
create_new_object, # flake8: noqa
edit_object, # flake8: noqa
delete_object, # flake8: noqa
update_object # flake8: noqa
)
|
29d895b23e8a4656a82a9a39489c354b67b2b067 | bioagents/databases/chebi_client.py | bioagents/databases/chebi_client.py | import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
chebi_client = suds.client.Client(chebi_wsdl)
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
| import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
try:
chebi_client = suds.client.Client(chebi_wsdl)
except Exception as e:
logger.error('ChEBI web service is unavailable.')
chebi_client = None
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
if chebi_client is None:
return None
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
| Handle webservice problems in ChEBI client | Handle webservice problems in ChEBI client
| Python | bsd-2-clause | bgyori/bioagents,sorgerlab/bioagents | import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
chebi_client = suds.client.Client(chebi_wsdl)
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
Handle webservice problems in ChEBI client | import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
try:
chebi_client = suds.client.Client(chebi_wsdl)
except Exception as e:
logger.error('ChEBI web service is unavailable.')
chebi_client = None
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
if chebi_client is None:
return None
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
| <commit_before>import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
chebi_client = suds.client.Client(chebi_wsdl)
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
<commit_msg>Handle webservice problems in ChEBI client<commit_after> | import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
try:
chebi_client = suds.client.Client(chebi_wsdl)
except Exception as e:
logger.error('ChEBI web service is unavailable.')
chebi_client = None
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
if chebi_client is None:
return None
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
| import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
chebi_client = suds.client.Client(chebi_wsdl)
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
Handle webservice problems in ChEBI clientimport suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
try:
chebi_client = suds.client.Client(chebi_wsdl)
except Exception as e:
logger.error('ChEBI web service is unavailable.')
chebi_client = None
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
if chebi_client is None:
return None
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
| <commit_before>import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
chebi_client = suds.client.Client(chebi_wsdl)
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
<commit_msg>Handle webservice problems in ChEBI client<commit_after>import suds
import re
import logging
logger = logging.getLogger('suds')
logger.setLevel(logging.ERROR)
chebi_wsdl = 'http://www.ebi.ac.uk/webservices/chebi/2.0/webservice?wsdl'
try:
chebi_client = suds.client.Client(chebi_wsdl)
except Exception as e:
logger.error('ChEBI web service is unavailable.')
chebi_client = None
def get_id(name, max_results=1):
# TODO: reimplement to get result from actual returned object
# not based on string matching
if chebi_client is None:
return None
res = chebi_client.service.getLiteEntity(name, 'CHEBI NAME',
max_results, 'ALL')
res_str = str(res)
if res_str == '':
return None
match = re.search(r'"CHEBI:(.*)"', res_str)
chebi_id = match.groups()[0]
return chebi_id
|
be0a9da80d46630d8958aa95838c5c7c67dda375 | blanc_basic_podcast/podcast/views.py | blanc_basic_podcast/podcast/views.py | from django.views.generic import ListView, DateDetailView
from .models import PodcastFile
class PodcastFileListView(ListView):
queryset = PodcastFile.objects.filter(published=True)
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
| from django.views.generic import ListView, DateDetailView
from django.utils import timezone
from django.conf import settings
from .models import PodcastFile
class PodcastFileListView(ListView):
paginate_by = getattr(settings, 'PODCAST_PER_PAGE', 10)
def get_queryset(self):
return PodcastFile.objects.filter(published=True,
date__lte=timezone.now())
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
| Fix the list view for podcast files, and allow custom per page number in settings | Fix the list view for podcast files, and allow custom per page number in settings
| Python | bsd-2-clause | blancltd/blanc-basic-podcast | from django.views.generic import ListView, DateDetailView
from .models import PodcastFile
class PodcastFileListView(ListView):
queryset = PodcastFile.objects.filter(published=True)
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
Fix the list view for podcast files, and allow custom per page number in settings | from django.views.generic import ListView, DateDetailView
from django.utils import timezone
from django.conf import settings
from .models import PodcastFile
class PodcastFileListView(ListView):
paginate_by = getattr(settings, 'PODCAST_PER_PAGE', 10)
def get_queryset(self):
return PodcastFile.objects.filter(published=True,
date__lte=timezone.now())
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
| <commit_before>from django.views.generic import ListView, DateDetailView
from .models import PodcastFile
class PodcastFileListView(ListView):
queryset = PodcastFile.objects.filter(published=True)
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
<commit_msg>Fix the list view for podcast files, and allow custom per page number in settings<commit_after> | from django.views.generic import ListView, DateDetailView
from django.utils import timezone
from django.conf import settings
from .models import PodcastFile
class PodcastFileListView(ListView):
paginate_by = getattr(settings, 'PODCAST_PER_PAGE', 10)
def get_queryset(self):
return PodcastFile.objects.filter(published=True,
date__lte=timezone.now())
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
| from django.views.generic import ListView, DateDetailView
from .models import PodcastFile
class PodcastFileListView(ListView):
queryset = PodcastFile.objects.filter(published=True)
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
Fix the list view for podcast files, and allow custom per page number in settingsfrom django.views.generic import ListView, DateDetailView
from django.utils import timezone
from django.conf import settings
from .models import PodcastFile
class PodcastFileListView(ListView):
paginate_by = getattr(settings, 'PODCAST_PER_PAGE', 10)
def get_queryset(self):
return PodcastFile.objects.filter(published=True,
date__lte=timezone.now())
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
| <commit_before>from django.views.generic import ListView, DateDetailView
from .models import PodcastFile
class PodcastFileListView(ListView):
queryset = PodcastFile.objects.filter(published=True)
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
<commit_msg>Fix the list view for podcast files, and allow custom per page number in settings<commit_after>from django.views.generic import ListView, DateDetailView
from django.utils import timezone
from django.conf import settings
from .models import PodcastFile
class PodcastFileListView(ListView):
paginate_by = getattr(settings, 'PODCAST_PER_PAGE', 10)
def get_queryset(self):
return PodcastFile.objects.filter(published=True,
date__lte=timezone.now())
class PodcastFileDetailView(DateDetailView):
queryset = PodcastFile.objects.filter(published=True)
month_format = '%m'
date_field = 'date'
|
9334e8059cd74086278133345566c4b4591c81a4 | amgut/__init__.py | amgut/__init__.py | #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG']
| #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
db_conn = SQLConnectionHandler()
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG',
'db_conn']
| Add DB connection to amgut init module | Add DB connection to amgut init module
| Python | bsd-3-clause | biocore/american-gut-web,ElDeveloper/american-gut-web,PersonalGenomesOrg/american-gut-web,josenavas/american-gut-web,josenavas/american-gut-web,wasade/american-gut-web,mortonjt/american-gut-web,adamrp/american-gut-web,biocore/american-gut-web,adamrp/american-gut-web,mortonjt/american-gut-web,mortonjt/american-gut-web,squirrelo/american-gut-web,ElDeveloper/american-gut-web,wasade/american-gut-web,squirrelo/american-gut-web,PersonalGenomesOrg/american-gut-web,PersonalGenomesOrg/american-gut-web,squirrelo/american-gut-web,ElDeveloper/american-gut-web,biocore/american-gut-web,adamrp/american-gut-web,josenavas/american-gut-web | #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG']
Add DB connection to amgut init module | #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
db_conn = SQLConnectionHandler()
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG',
'db_conn']
| <commit_before>#!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG']
<commit_msg>Add DB connection to amgut init module<commit_after> | #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
db_conn = SQLConnectionHandler()
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG',
'db_conn']
| #!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG']
Add DB connection to amgut init module#!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
db_conn = SQLConnectionHandler()
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG',
'db_conn']
| <commit_before>#!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG']
<commit_msg>Add DB connection to amgut init module<commit_after>#!/usr/bin/env python
from __future__ import division
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Project Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import importlib
from amgut.lib.config_manager import AMGUT_CONFIG
from amgut.lib.locale_data import media_locale
from redis import Redis
from amgut.lib.data_access.sql_connection import SQLConnectionHandler
r_server = Redis(host=AMGUT_CONFIG.redis_host,
port=AMGUT_CONFIG.redis_port,
db=AMGUT_CONFIG.redis_db_id)
db_conn = SQLConnectionHandler()
current_locale_module = '.'.join(['amgut.lib.locale_data',
AMGUT_CONFIG.locale])
try:
current_locale = importlib.import_module(current_locale_module)
except ImportError:
raise ImportError("Cannot import locale! %s" % current_locale_module)
text_locale = current_locale.text_locale
media_locale.update(current_locale.media_locale)
__all__ = ['r_server', 'text_locale', 'media_locale', 'AMGUT_CONFIG',
'db_conn']
|
d28e884d832b63bef1434476a378de9b7e333264 | samples/WavGenerator.py | samples/WavGenerator.py | #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
| #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| Add a main function with command line arguments | Add a main function with command line arguments
Now able to generate wave files from command line | Python | mit | parrisha/raspi-visualizer | #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
Add a main function with command line arguments
Now able to generate wave files from command line | #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| <commit_before>#############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
<commit_msg>Add a main function with command line arguments
Now able to generate wave files from command line<commit_after> | #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| #############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
Add a main function with command line arguments
Now able to generate wave files from command line#############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
| <commit_before>#############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample_'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
<commit_msg>Add a main function with command line arguments
Now able to generate wave files from command line<commit_after>#############
# ECE 612 Spring 2017
# Joe Parrish
#
# Use the same logic from SpectrumTester.py to generate multiple sine waves
# but write that output to a .wav file for file based testing of the project code
#############
import wave
import argparse
import numpy as np
def generate_sample_file(test_freqs, test_amps, chunk=4096, samplerate=44100):
filename = 'Sample'
x = np.arange(chunk)
y = np.zeros(chunk)
for test_freq,test_amp in zip(test_freqs,test_amps):
filename += '_' + str(test_freq) + 'Hz@' + str(test_amp)
y = np.add(y, np.sin(2 * np.pi * test_freq * x / samplerate) * test_amp)
filename += '.wav'
y = y.astype('i2')
wave_writer = wave.open(filename, mode='wb')
wave_writer.setnchannels(1)
wave_writer.setsampwidth(2)
wave_writer.setframerate(samplerate)
for x in range(0,8):
wave_writer.writeframes(y)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Write a wave file containing Numpy generated sine waves')
parser.add_argument('--freqs', nargs='+', type=int)
parser.add_argument('--amps', nargs='+', type=int)
args = parser.parse_args()
generate_sample_file(args.freqs, args.amps)
|
ee6ad550ebeeaebf7c0959932ec60cbd923d480e | plowshare/__init__.py | plowshare/__init__.py | from .plowshare import *
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from plowshare import *
| Add proper header and fix import. | Add proper header and fix import.
| Python | mit | Storj/plowshare-wrapper | from .plowshare import *
Add proper header and fix import. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from plowshare import *
| <commit_before>from .plowshare import *
<commit_msg>Add proper header and fix import.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from plowshare import *
| from .plowshare import *
Add proper header and fix import.#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from plowshare import *
| <commit_before>from .plowshare import *
<commit_msg>Add proper header and fix import.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 Storj Labs
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from plowshare import *
|
643c8896b23cc1d008ce1e2a278d4379fb3b9b08 | byceps/services/language/dbmodels.py | byceps/services/language/dbmodels.py | """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'locales'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
| """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'languages'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
| Fix name of languages table | Fix name of languages table
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'locales'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
Fix name of languages table | """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'languages'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
| <commit_before>"""
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'locales'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
<commit_msg>Fix name of languages table<commit_after> | """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'languages'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
| """
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'locales'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
Fix name of languages table"""
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'languages'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
| <commit_before>"""
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'locales'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
<commit_msg>Fix name of languages table<commit_after>"""
byceps.services.language.dbmodels
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from ...database import db
class Language(db.Model):
"""A language.
The code can be just `en` or `de`, but also `en-gb` or `de-de`.
"""
__tablename__ = 'languages'
code = db.Column(db.UnicodeText, primary_key=True)
def __init__(self, code: str) -> None:
self.code = code
|
c5e5b3d6c3d8cad75b1d2eac16179872dd415eb9 | scripts/asgard-deploy.py | scripts/asgard-deploy.py | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() | Use environment var for AMI_ID. | Use environment var for AMI_ID.
| Python | agpl-3.0 | eltoncarr/tubular,eltoncarr/tubular | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()Use environment var for AMI_ID. | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() | <commit_before>#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()<commit_msg>Use environment var for AMI_ID.<commit_after> | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() | #!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()Use environment var for AMI_ID.#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() | <commit_before>#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy()<commit_msg>Use environment var for AMI_ID.<commit_after>#!/usr/bin/env python
import sys
import logging
import click
import tubular.asgard as asgard
logging.basicConfig(stream=sys.stdout, level=logging.ERROR)
@click.command()
@click.option('--ami_id', envvar='AMI_ID', help='The ami-id to deploy', required=True)
def deploy(ami_id):
try:
asgard.deploy(ami_id)
except Exception, e:
click.secho("Error Deploying AMI: {0}.\nMessage: {1}".format(ami_id, e.message), fg='red')
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
deploy() |
51a126c0ada7c00a99416b241bb1c11888e82836 | esmgrids/jra55_grid.py | esmgrids/jra55_grid.py |
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
| Use bounds to determing jra55 grid cell locations. | Use bounds to determing jra55 grid cell locations.
| Python | apache-2.0 | DoublePrecision/esmgrids |
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
Use bounds to determing jra55 grid cell locations. |
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
| <commit_before>
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
<commit_msg>Use bounds to determing jra55 grid cell locations.<commit_after> |
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
Use bounds to determing jra55 grid cell locations.
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
| <commit_before>
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
x_t = f.variables['lon'][:]
y_t = f.variables['lat'][1:-1]
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
<commit_msg>Use bounds to determing jra55 grid cell locations.<commit_after>
import numpy as np
import netCDF4 as nc
from .base_grid import BaseGrid
class Jra55Grid(BaseGrid):
def __init__(self, h_grid_def, description='JRA55 regular grid'):
self.type = 'Arakawa A'
self.full_name = 'JRA55'
with nc.Dataset(h_grid_def) as f:
lon_bnds = f.variables['lon_bnds'][:]
lat_bnds = f.variables['lat_bnds'][:]
dx_t = lon_bnds[:, 1] - lon_bnds[:, 0]
dy_t = lat_bnds[:, 1] - lat_bnds[:, 0]
x_t = lon_bnds[:, 0] + dx_t[:] / 2
y_t = lat_bnds[:, 0] + dy_t[:] / 2
super(Jra55Grid, self).__init__(x_t=x_t, y_t=y_t, description=description)
def fix_pole_holes(self):
clat_copy = np.copy(self.clat_t)
self.clat_t[2, -1,:] = 90.0
self.clat_t[3, -1,:] = 90.0
# Do South pole as well
self.clat_t[0, 0,:] = -90.0
self.clat_t[1, 0,:] = -90.0
|
6251bffd124e3ab960f6150ef66585a3653ef4cf | argus/backends/base.py | argus/backends/base.py | import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
| # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
| Add the license header where it's missing. | Add the license header where it's missing.
| Python | apache-2.0 | micumatei/cloudbase-init-ci,stefan-caraiman/cloudbase-init-ci,AlexandruTudose/cloudbase-init-ci,PCManticore/argus-ci,cloudbase/cloudbase-init-ci,cmin764/argus-ci | import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
Add the license header where it's missing. | # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
| <commit_before>import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
<commit_msg>Add the license header where it's missing.<commit_after> | # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
| import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
Add the license header where it's missing.# Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
| <commit_before>import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
<commit_msg>Add the license header where it's missing.<commit_after># Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class BaseBackend(object):
@abc.abstractmethod
def setup_instance(self):
"""Called by setUpClass to setup an instance"""
@abc.abstractmethod
def cleanup(self):
"""Needs to cleanup the resources created in ``setup_instance``"""
|
32432291eea4b3b6d6ac3cf597102740ae83df28 | d_parser/helpers/re_set.py | d_parser/helpers/re_set.py | # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.+(?P<int>\d+).+$')
| # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.*?(?P<int>\d+).+$')
| Fix int extractor (greedy > lazy) | Fix int extractor (greedy > lazy)
| Python | mit | Holovin/D_GrabDemo | # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.+(?P<int>\d+).+$')
Fix int extractor (greedy > lazy) | # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.*?(?P<int>\d+).+$')
| <commit_before># re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.+(?P<int>\d+).+$')
<commit_msg>Fix int extractor (greedy > lazy)<commit_after> | # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.*?(?P<int>\d+).+$')
| # re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.+(?P<int>\d+).+$')
Fix int extractor (greedy > lazy)# re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.*?(?P<int>\d+).+$')
| <commit_before># re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.+(?P<int>\d+).+$')
<commit_msg>Fix int extractor (greedy > lazy)<commit_after># re_set.py
# Module for generating regex rules
# r1
import re
class Ree:
float = None
number = None
page_number = None
extract_int = None
@staticmethod
def init():
Ree.is_float()
Ree.is_number()
Ree.is_page_number('')
Ree.extract_int_compile()
@staticmethod
def is_page_number(page_param):
Ree.page_number = re.compile('(?P<param>{})=(?P<page>\d+)'.format(page_param))
@staticmethod
def is_float(price_sep=',.'):
Ree.float = re.compile('(?P<price>\d+([{}]\d+)?)'.format(price_sep))
@staticmethod
def is_number():
Ree.number = re.compile('^\d+$')
@staticmethod
def extract_int_compile():
Ree.extract_int = re.compile('^.*?(?P<int>\d+).+$')
|
3ce0db9cccea34998674e340c1dcc7f49b487e9a | TweetPoster/twitter.py | TweetPoster/twitter.py | from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return r.json()
| from datetime import datetime
from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return Tweet(r.json())
class Tweet(object):
def __init__(self, json):
self.user = TwitterUser(json['user'])
self.text = json['text']
self.id = json['id']
self.in_reply_to = json['in_reply_to_status_id_str']
self.entities = json['entities']
self.link = 'https://twitter.com/{0}/status/{1}'.format(self.user.name, self.id)
self.datetime = datetime.strptime(json['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
class TwitterUser(object):
def __init__(self, json):
self.name = json['screen_name']
self.link = 'https://twitter.com/' + self.name
| Convert JSON into Tweet and TwitterUser objects | Convert JSON into Tweet and TwitterUser objects
| Python | mit | joealcorn/TweetPoster,tytek2012/TweetPoster,r3m0t/TweetPoster,aperson/TweetPoster | from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return r.json()
Convert JSON into Tweet and TwitterUser objects | from datetime import datetime
from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return Tweet(r.json())
class Tweet(object):
def __init__(self, json):
self.user = TwitterUser(json['user'])
self.text = json['text']
self.id = json['id']
self.in_reply_to = json['in_reply_to_status_id_str']
self.entities = json['entities']
self.link = 'https://twitter.com/{0}/status/{1}'.format(self.user.name, self.id)
self.datetime = datetime.strptime(json['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
class TwitterUser(object):
def __init__(self, json):
self.name = json['screen_name']
self.link = 'https://twitter.com/' + self.name
| <commit_before>from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return r.json()
<commit_msg>Convert JSON into Tweet and TwitterUser objects<commit_after> | from datetime import datetime
from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return Tweet(r.json())
class Tweet(object):
def __init__(self, json):
self.user = TwitterUser(json['user'])
self.text = json['text']
self.id = json['id']
self.in_reply_to = json['in_reply_to_status_id_str']
self.entities = json['entities']
self.link = 'https://twitter.com/{0}/status/{1}'.format(self.user.name, self.id)
self.datetime = datetime.strptime(json['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
class TwitterUser(object):
def __init__(self, json):
self.name = json['screen_name']
self.link = 'https://twitter.com/' + self.name
| from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return r.json()
Convert JSON into Tweet and TwitterUser objectsfrom datetime import datetime
from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return Tweet(r.json())
class Tweet(object):
def __init__(self, json):
self.user = TwitterUser(json['user'])
self.text = json['text']
self.id = json['id']
self.in_reply_to = json['in_reply_to_status_id_str']
self.entities = json['entities']
self.link = 'https://twitter.com/{0}/status/{1}'.format(self.user.name, self.id)
self.datetime = datetime.strptime(json['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
class TwitterUser(object):
def __init__(self, json):
self.name = json['screen_name']
self.link = 'https://twitter.com/' + self.name
| <commit_before>from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return r.json()
<commit_msg>Convert JSON into Tweet and TwitterUser objects<commit_after>from datetime import datetime
from requests_oauthlib import OAuth1
from TweetPoster import User, config
class Twitter(User):
def __init__(self, *a, **kw):
super(Twitter, self).__init__(*a, **kw)
self.session.auth = OAuth1(
config['twitter']['consumer_key'],
config['twitter']['consumer_secret'],
config['twitter']['access_token'],
config['twitter']['access_secret'],
signature_type='auth_header'
)
def get_tweet(self, tweet_id):
url = 'https://api.twitter.com/1.1/statuses/show.json'
params = {
'id': tweet_id,
'include_entities': 1,
}
r = self.get(url, params=params)
assert r.status_code == 200, r.status_code
return Tweet(r.json())
class Tweet(object):
def __init__(self, json):
self.user = TwitterUser(json['user'])
self.text = json['text']
self.id = json['id']
self.in_reply_to = json['in_reply_to_status_id_str']
self.entities = json['entities']
self.link = 'https://twitter.com/{0}/status/{1}'.format(self.user.name, self.id)
self.datetime = datetime.strptime(json['created_at'], '%a %b %d %H:%M:%S +0000 %Y')
class TwitterUser(object):
def __init__(self, json):
self.name = json['screen_name']
self.link = 'https://twitter.com/' + self.name
|
cb29f8522c4e90b7db82bac408bc16bd2f1d53da | cs251tk/common/run.py | cs251tk/common/run.py | import shlex
import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
# Stringified commands are passed in from the spec files.
# Otherwise, it needs to be an array.
if isinstance(cmd, str):
cmd = shlex.split(cmd)
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
| import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
| Revert "revert part of 27cd680" | Revert "revert part of 27cd680"
c491bbc2302ac7c95c180d31845c643804fa30d3
| Python | mit | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit | import shlex
import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
# Stringified commands are passed in from the spec files.
# Otherwise, it needs to be an array.
if isinstance(cmd, str):
cmd = shlex.split(cmd)
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
Revert "revert part of 27cd680"
c491bbc2302ac7c95c180d31845c643804fa30d3 | import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
| <commit_before>import shlex
import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
# Stringified commands are passed in from the spec files.
# Otherwise, it needs to be an array.
if isinstance(cmd, str):
cmd = shlex.split(cmd)
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
<commit_msg>Revert "revert part of 27cd680"
c491bbc2302ac7c95c180d31845c643804fa30d3<commit_after> | import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
| import shlex
import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
# Stringified commands are passed in from the spec files.
# Otherwise, it needs to be an array.
if isinstance(cmd, str):
cmd = shlex.split(cmd)
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
Revert "revert part of 27cd680"
c491bbc2302ac7c95c180d31845c643804fa30d3import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
| <commit_before>import shlex
import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
# Stringified commands are passed in from the spec files.
# Otherwise, it needs to be an array.
if isinstance(cmd, str):
cmd = shlex.split(cmd)
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
<commit_msg>Revert "revert part of 27cd680"
c491bbc2302ac7c95c180d31845c643804fa30d3<commit_after>import copy
import os
from subprocess import STDOUT, run as _run, CalledProcessError, TimeoutExpired
# This env stuff is to catch glibc errors, because
# it apparently prints to /dev/tty instead of stderr.
# (see http://stackoverflow.com/a/27797579)
ENV = copy.copy(os.environ)
ENV["LIBC_FATAL_STDERR_"] = "1"
def run(cmd, input_data=None, timeout=None):
status = 'success'
try:
result = _run(
cmd,
stderr=STDOUT,
timeout=timeout,
input=input_data,
env=ENV,
check=True)
except CalledProcessError as err:
status = 'called process error'
result = err.output if err.output else str(err)
except TimeoutExpired as err:
status = 'timed out after {} seconds'.format(timeout)
result = err.output if err.output else str(err)
except FileNotFoundError as err:
status = 'not found'
result = str(err)
except ProcessLookupError as err:
try:
status, result = run(cmd, input_data=input_data, timeout=timeout)
except:
status = 'process lookup error'
result = str(err)
try:
if not isinstance(result, str):
result = str(result, 'utf-8')
except UnicodeDecodeError:
result = str(result, 'cp437')
return (status, result)
|
b836a7347d66e6fb8df2f97c011b875e24c91e17 | dashboard/consumers.py | dashboard/consumers.py | from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
message.reply_channel.send({
'accept': True
})
| from channels import Group
from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
Group('btc-price').add(message.reply_channel)
message.reply_channel.send({
'accept': True
})
| Add user to the group when he first connects | Add user to the group when he first connects
| Python | mit | alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor,alessandroHenrique/coinpricemonitor | from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
message.reply_channel.send({
'accept': True
})
Add user to the group when he first connects | from channels import Group
from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
Group('btc-price').add(message.reply_channel)
message.reply_channel.send({
'accept': True
})
| <commit_before>from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
message.reply_channel.send({
'accept': True
})
<commit_msg>Add user to the group when he first connects<commit_after> | from channels import Group
from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
Group('btc-price').add(message.reply_channel)
message.reply_channel.send({
'accept': True
})
| from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
message.reply_channel.send({
'accept': True
})
Add user to the group when he first connectsfrom channels import Group
from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
Group('btc-price').add(message.reply_channel)
message.reply_channel.send({
'accept': True
})
| <commit_before>from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
message.reply_channel.send({
'accept': True
})
<commit_msg>Add user to the group when he first connects<commit_after>from channels import Group
from channels.auth import channel_session_user, channel_session_user_from_http
@channel_session_user_from_http
def ws_connect(message):
Group('btc-price').add(message.reply_channel)
message.reply_channel.send({
'accept': True
})
|
aeaf802100cd6869178dd9f412d35e452916a63d | common/commands/view_manipulation.py | common/commands/view_manipulation.py | from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
# NeoVintageous renamed the command starting with v1.22.0.
# We call both commands for backwards compatibility.
self.view.run_command("_enter_insert_mode")
self.view.run_command("nv_enter_insert_mode") # since NeoVintageous 1.22.0
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0 | Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0
Fixes #1395
In NeoVintageous/NeoVintageous#749, pushed as 1.22.0 (Oct 2020), the
relevant commands were renamed.
We follow the new names, but for now also call the old ones.
| Python | mit | divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy | from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0
Fixes #1395
In NeoVintageous/NeoVintageous#749, pushed as 1.22.0 (Oct 2020), the
relevant commands were renamed.
We follow the new names, but for now also call the old ones. | from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
# NeoVintageous renamed the command starting with v1.22.0.
# We call both commands for backwards compatibility.
self.view.run_command("_enter_insert_mode")
self.view.run_command("nv_enter_insert_mode") # since NeoVintageous 1.22.0
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| <commit_before>from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
<commit_msg>Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0
Fixes #1395
In NeoVintageous/NeoVintageous#749, pushed as 1.22.0 (Oct 2020), the
relevant commands were renamed.
We follow the new names, but for now also call the old ones.<commit_after> | from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
# NeoVintageous renamed the command starting with v1.22.0.
# We call both commands for backwards compatibility.
self.view.run_command("_enter_insert_mode")
self.view.run_command("nv_enter_insert_mode") # since NeoVintageous 1.22.0
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0
Fixes #1395
In NeoVintageous/NeoVintageous#749, pushed as 1.22.0 (Oct 2020), the
relevant commands were renamed.
We follow the new names, but for now also call the old ones.from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
# NeoVintageous renamed the command starting with v1.22.0.
# We call both commands for backwards compatibility.
self.view.run_command("_enter_insert_mode")
self.view.run_command("nv_enter_insert_mode") # since NeoVintageous 1.22.0
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| <commit_before>from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
<commit_msg>Fix `vintageous_enter_insert_mode` for NeoVintageous 1.22.0
Fixes #1395
In NeoVintageous/NeoVintageous#749, pushed as 1.22.0 (Oct 2020), the
relevant commands were renamed.
We follow the new names, but for now also call the old ones.<commit_after>from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
# NeoVintageous renamed the command starting with v1.22.0.
# We call both commands for backwards compatibility.
self.view.run_command("_enter_insert_mode")
self.view.run_command("nv_enter_insert_mode") # since NeoVintageous 1.22.0
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
|
a55b96a7d64643af6d2adcd6a15fe3348c5d1c41 | dbaas/workflow/settings.py | dbaas/workflow/settings.py | DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
| DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
| Add create dns on main workflow | Add create dns on main workflow
| Python | bsd-3-clause | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
Add create dns on main workflow | DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
| <commit_before>DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
<commit_msg>Add create dns on main workflow<commit_after> | DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
| DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
Add create dns on main workflowDEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
| <commit_before>DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
<commit_msg>Add create dns on main workflow<commit_after>DEPLOY_MYSQL_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
DEPLOY_VIRTUALMACHINE = (
'workflow.steps.build_databaseinfra.BuildDatabaseInfra',
'workflow.steps.create_virtualmachines.CreateVirtualMachine',
'workflow.steps.create_dns.CreateDns'
)
DEPLOY_MONGO_WORKFLOW = (
'util.gen_names',
'util.gen_dbinfra',
'dbaas_cloudstack.create_vm'
)
|
6e9a19d362005125036f5c0ecdbe88fc1c4f01aa | product_computed_list_price/__init__.py | product_computed_list_price/__init__.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
from . import pricelist
| FIX price type 'list_price' inactivation | FIX price type 'list_price' inactivation
| Python | agpl-3.0 | ingadhoc/product,ingadhoc/product | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import productFIX price type 'list_price' inactivation | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
from . import pricelist
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product<commit_msg>FIX price type 'list_price' inactivation<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
from . import pricelist
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import productFIX price type 'list_price' inactivation# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
from . import pricelist
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product<commit_msg>FIX price type 'list_price' inactivation<commit_after># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from . import product
from . import pricelist
|
f2ea241e9bb6e5e927a90c56438bf7883ae3744f | siemstress/__init__.py | siemstress/__init__.py | __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
| __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
import siemstress.trigger
| Add trigger to module import | Add trigger to module import
| Python | mit | dogoncouch/siemstress | __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
Add trigger to module import | __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
import siemstress.trigger
| <commit_before>__version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
<commit_msg>Add trigger to module import<commit_after> | __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
import siemstress.trigger
| __version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
Add trigger to module import__version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
import siemstress.trigger
| <commit_before>__version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
<commit_msg>Add trigger to module import<commit_after>__version__ = '0.2'
__author__ = 'Dan Persons <dpersonsdev@gmail.com>'
__license__ = 'MIT License'
__github__ = 'https://github.com/dogoncouch/siemstress'
__all__ = ['core', 'querycore', 'query']
import siemstress.query
import siemstress.trigger
|
3b0608e11da620f1e12aeb270dbaf2f255a35cec | Cura/Qt/Bindings/ControllerProxy.py | Cura/Qt/Bindings/ControllerProxy.py | from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
| from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
from Cura.Scene.BoxRenderer import BoxRenderer
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
| Add a (temporary) bounding box around an added mesh | Add a (temporary) bounding box around an added mesh
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
Add a (temporary) bounding box around an added mesh | from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
from Cura.Scene.BoxRenderer import BoxRenderer
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
| <commit_before>from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
<commit_msg>Add a (temporary) bounding box around an added mesh<commit_after> | from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
from Cura.Scene.BoxRenderer import BoxRenderer
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
| from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
Add a (temporary) bounding box around an added meshfrom PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
from Cura.Scene.BoxRenderer import BoxRenderer
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
| <commit_before>from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
<commit_msg>Add a (temporary) bounding box around an added mesh<commit_after>from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl
from Cura.Application import Application
from Cura.Scene.SceneNode import SceneNode
from Cura.Scene.BoxRenderer import BoxRenderer
class ControllerProxy(QObject):
def __init__(self, parent = None):
super().__init__(parent)
self._controller = Application.getInstance().getController()
@pyqtSlot(str)
def setActiveView(self, view):
self._controller.setActiveView(view)
@pyqtSlot(str)
def setActiveTool(self, tool):
self._controller.setActiveTool(tool)
@pyqtSlot(QUrl)
def addMesh(self, file_name):
if not file_name.isValid():
return
mesh = SceneNode(self._controller.getScene().getRoot())
app = Application.getInstance()
mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
|
e82ab299a6c68f682a9f9b769e79cf2054684e3b | reviewboard/attachments/evolutions/file_attachment_uuid.py | reviewboard/attachments/evolutions/file_attachment_uuid.py | from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
| from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
| Fix the FileAttachment.uuid evolution to match the model field. | Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/
| Python | mit | davidt/reviewboard,davidt/reviewboard,reviewboard/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,brennie/reviewboard,reviewboard/reviewboard,brennie/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,brennie/reviewboard,chipx86/reviewboard,reviewboard/reviewboard,davidt/reviewboard,chipx86/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,sgallagher/reviewboard,brennie/reviewboard,davidt/reviewboard | from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/ | from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
| <commit_before>from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
<commit_msg>Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/<commit_after> | from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
| from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
| <commit_before>from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=None, null=True),
]
<commit_msg>Fix the FileAttachment.uuid evolution to match the model field.
The evolution that was included in the existing code didn't match the
definition of the field. This is a very simple fix.
Testing done:
Ran evolutions.
Reviewed at https://reviews.reviewboard.org/r/8141/<commit_after>from __future__ import unicode_literals
from django_evolution.mutations import AddField
from django.db import models
MUTATIONS = [
AddField('FileAttachment', 'uuid', models.CharField, max_length=255,
initial=''),
]
|
3e33849ded2c69760ce93b4b1e9ab8094904040f | space-age/space_age.py | space-age/space_age.py | class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
| class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
| Implement __getattr__ to reduce code | Implement __getattr__ to reduce code
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
Implement __getattr__ to reduce code | class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
| <commit_before>class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
<commit_msg>Implement __getattr__ to reduce code<commit_after> | class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
| class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
Implement __getattr__ to reduce codeclass SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
| <commit_before>class SpaceAge(object):
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def on_earth(self):
return round(self.years, 2)
def on_mercury(self):
return round(self.years/0.2408467, 2)
def on_venus(self):
return round(self.years/0.6151976, 2)
def on_mars(self):
return round(self.years/1.8808158, 2)
def on_jupiter(self):
return round(self.years/11.862615, 2)
def on_saturn(self):
return round(self.years/29.447498, 2)
def on_uranus(self):
return round(self.years/84.016846, 2)
def on_neptune(self):
return round(self.years/164.79132, 2)
<commit_msg>Implement __getattr__ to reduce code<commit_after>class SpaceAge(object):
YEARS = {"on_earth": 1,
"on_mercury": 0.2408467,
"on_venus": 0.61519726,
"on_mars": 1.8808158,
"on_jupiter": 11.862615,
"on_saturn": 29.447498,
"on_uranus": 84.016846,
"on_neptune": 164.79132}
def __init__(self, seconds):
self.seconds = seconds
@property
def years(self):
return self.seconds/31557600
def __getattr__(self, on_planet):
if on_planet in SpaceAge.YEARS:
return lambda: round(self.years/SpaceAge.YEARS[on_planet], 2)
else:
raise AttributeError
|
486d3ab08858c2a872732f0efd82fe2fb0054366 | relay_api/api/main.py | relay_api/api/main.py | from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def get_relay(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
| from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def set_relay_on(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
| Change the new endpoint function name | Change the new endpoint function name
| Python | mit | pahumadad/raspi-relay-api | from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def get_relay(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
Change the new endpoint function name | from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def set_relay_on(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
| <commit_before>from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def get_relay(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
<commit_msg>Change the new endpoint function name<commit_after> | from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def set_relay_on(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
| from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def get_relay(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
Change the new endpoint function namefrom flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def set_relay_on(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
| <commit_before>from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def get_relay(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
<commit_msg>Change the new endpoint function name<commit_after>from flask import Flask
import relay_api.api.backend as backend
server = Flask(__name__)
backend.init_relays()
@server.route("/relay-api/relays/", methods=["GET"])
def get_all_relays():
js = backend.get_all_relays()
return js, 200
@server.route("/relay-api/relays/<relay_name>", methods=["GET"])
def get_relay(relay_name):
js = backend.get_relay(relay_name)
if not js:
return "", 404
return js, 200
@server.route("/relay-api/relays/<relay_name>/on", methods=["PUT"])
def set_relay_on(relay_name):
js = backend.set_relay_on(relay_name)
if not js:
return "", 404
return js, 200
|
15a5e6c1aca706330147475984848dfc33fd1a9d | common/djangoapps/mitxmako/tests.py | common/djangoapps/mitxmako/tests.py | from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
| from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
| Fix test so that it works with both CMS and LMS settings | Fix test so that it works with both CMS and LMS settings
| Python | agpl-3.0 | nanolearningllc/edx-platform-cypress,jjmiranda/edx-platform,SivilTaram/edx-platform,olexiim/edx-platform,eduNEXT/edx-platform,bdero/edx-platform,olexiim/edx-platform,nanolearningllc/edx-platform-cypress-2,cyanna/edx-platform,rismalrv/edx-platform,don-github/edx-platform,unicri/edx-platform,xuxiao19910803/edx-platform,Unow/edx-platform,kxliugang/edx-platform,shabab12/edx-platform,Edraak/edraak-platform,rismalrv/edx-platform,DefyVentures/edx-platform,eduNEXT/edunext-platform,beacloudgenius/edx-platform,mjirayu/sit_academy,LICEF/edx-platform,wwj718/edx-platform,EduPepperPDTesting/pepper2013-testing,mitocw/edx-platform,vismartltd/edx-platform,alexthered/kienhoc-platform,devs1991/test_edx_docmode,dkarakats/edx-platform,ak2703/edx-platform,wwj718/edx-platform,zhenzhai/edx-platform,jswope00/GAI,yokose-ks/edx-platform,Kalyzee/edx-platform,inares/edx-platform,mbareta/edx-platform-ft,PepperPD/edx-pepper-platform,arifsetiawan/edx-platform,itsjeyd/edx-platform,PepperPD/edx-pepper-platform,mbareta/edx-platform-ft,eestay/edx-platform,jamiefolsom/edx-platform,martynovp/edx-platform,DefyVentures/edx-platform,vasyarv/edx-platform,zofuthan/edx-platform,antoviaque/edx-platform,zadgroup/edx-platform,dsajkl/123,waheedahmed/edx-platform,arbrandes/edx-platform,hkawasaki/kawasaki-aio8-2,olexiim/edx-platform,nttks/jenkins-test,Softmotions/edx-platform,longmen21/edx-platform,solashirai/edx-platform,polimediaupv/edx-platform,torchingloom/edx-platform,xuxiao19910803/edx-platform,adoosii/edx-platform,Unow/edx-platform,ubc/edx-platform,pomegranited/edx-platform,jruiperezv/ANALYSE,cpennington/edx-platform,fly19890211/edx-platform,leansoft/edx-platform,raccoongang/edx-platform,yokose-ks/edx-platform,Livit/Livit.Learn.EdX,kursitet/edx-platform,J861449197/edx-platform,Edraak/circleci-edx-platform,hkawasaki/kawasaki-aio8-1,dcosentino/edx-platform,hkawasaki/kawasaki-aio8-0,playm2mboy/edx-platform,stvstnfrd/edx-platform,martynovp/edx-platform,UXE/local-edx,OmarIthawi/edx-platform,
valtech-mooc/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearning/edx-platform,teltek/edx-platform,Lektorium-LLC/edx-platform,Ayub-Khan/edx-platform,xuxiao19910803/edx-platform,rismalrv/edx-platform,mjirayu/sit_academy,etzhou/edx-platform,dkarakats/edx-platform,eduNEXT/edx-platform,EduPepperPD/pepper2013,morpheby/levelup-by,Unow/edx-platform,abdoosh00/edx-rtl-final,shurihell/testasia,eestay/edx-platform,yokose-ks/edx-platform,msegado/edx-platform,cognitiveclass/edx-platform,WatanabeYasumasa/edx-platform,sameetb-cuelogic/edx-platform-test,shashank971/edx-platform,appsembler/edx-platform,defance/edx-platform,waheedahmed/edx-platform,UOMx/edx-platform,CourseTalk/edx-platform,gsehub/edx-platform,rationalAgent/edx-platform-custom,abdoosh00/edx-rtl-final,peterm-itr/edx-platform,utecuy/edx-platform,jazkarta/edx-platform-for-isc,ahmadiga/min_edx,synergeticsedx/deployment-wipro,ESOedX/edx-platform,andyzsf/edx,ovnicraft/edx-platform,cyanna/edx-platform,TsinghuaX/edx-platform,rhndg/openedx,alexthered/kienhoc-platform,TsinghuaX/edx-platform,carsongee/edx-platform,franosincic/edx-platform,xuxiao19910803/edx-platform,TsinghuaX/edx-platform,benpatterson/edx-platform,kmoocdev/edx-platform,DNFcode/edx-platform,hastexo/edx-platform,kursitet/edx-platform,IndonesiaX/edx-platform,cselis86/edx-platform,a-parhom/edx-platform,stvstnfrd/edx-platform,BehavioralInsightsTeam/edx-platform,shashank971/edx-platform,rue89-tech/edx-platform,bdero/edx-platform,itsjeyd/edx-platform,atsolakid/edx-platform,ahmadiga/min_edx,vikas1885/test1,fintech-circle/edx-platform,hmcmooc/muddx-platform,LICEF/edx-platform,gymnasium/edx-platform,ferabra/edx-platform,jazkarta/edx-platform,romain-li/edx-platform,eemirtekin/edx-platform,deepsrijit1105/edx-platform,ampax/edx-platform,nanolearningllc/edx-platform-cypress-2,eduNEXT/edunext-platform,IONISx/edx-platform,jruiperezv/ANALYSE,utecuy/edx-platform,antoviaque/edx-platform,chudaol/edx-platform,zadgroup/edx-platform,hkawasaki/kawasaki-aio8-0,hkawasaki/kawasaki
-aio8-1,zadgroup/edx-platform,caesar2164/edx-platform,nanolearning/edx-platform,jolyonb/edx-platform,zubair-arbi/edx-platform,msegado/edx-platform,Endika/edx-platform,doismellburning/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmadio/edx-platform,kamalx/edx-platform,mahendra-r/edx-platform,TeachAtTUM/edx-platform,jamiefolsom/edx-platform,hkawasaki/kawasaki-aio8-1,ampax/edx-platform,itsjeyd/edx-platform,valtech-mooc/edx-platform,EDUlib/edx-platform,shubhdev/edxOnBaadal,eestay/edx-platform,chand3040/cloud_that,LICEF/edx-platform,RPI-OPENEDX/edx-platform,solashirai/edx-platform,zerobatu/edx-platform,eestay/edx-platform,jswope00/GAI,jonathan-beard/edx-platform,pelikanchik/edx-platform,wwj718/ANALYSE,beacloudgenius/edx-platform,jamesblunt/edx-platform,romain-li/edx-platform,TeachAtTUM/edx-platform,simbs/edx-platform,synergeticsedx/deployment-wipro,Lektorium-LLC/edx-platform,Kalyzee/edx-platform,RPI-OPENEDX/edx-platform,DefyVentures/edx-platform,arbrandes/edx-platform,CredoReference/edx-platform,chauhanhardik/populo_2,apigee/edx-platform,ampax/edx-platform-backup,doganov/edx-platform,AkA84/edx-platform,IONISx/edx-platform,ahmadio/edx-platform,MSOpenTech/edx-platform,utecuy/edx-platform,praveen-pal/edx-platform,zofuthan/edx-platform,hkawasaki/kawasaki-aio8-2,LearnEra/LearnEraPlaftform,romain-li/edx-platform,hmcmooc/muddx-platform,rhndg/openedx,CredoReference/edx-platform,motion2015/a3,abdoosh00/edraak,shubhdev/openedx,jolyonb/edx-platform,jazkarta/edx-platform-for-isc,mushtaqak/edx-platform,cecep-edu/edx-platform,sameetb-cuelogic/edx-platform-test,ZLLab-Mooc/edx-platform,DNFcode/edx-platform,B-MOOC/edx-platform,Livit/Livit.Learn.EdX,UOMx/edx-platform,B-MOOC/edx-platform,J861449197/edx-platform,tiagochiavericosta/edx-platform,utecuy/edx-platform,halvertoluke/edx-platform,don-github/edx-platform,jelugbo/tundex,AkA84/edx-platform,teltek/edx-platform,ovnicraft/edx-platform,cselis86/edx-platform,pomegranited/edx-platform,ubc/edx-platform,miptliot/edx-platform,hamzehd/e
dx-platform,jazkarta/edx-platform-for-isc,gymnasium/edx-platform,Semi-global/edx-platform,jelugbo/tundex,bigdatauniversity/edx-platform,PepperPD/edx-pepper-platform,motion2015/edx-platform,zhenzhai/edx-platform,franosincic/edx-platform,fly19890211/edx-platform,leansoft/edx-platform,philanthropy-u/edx-platform,ahmadiga/min_edx,SravanthiSinha/edx-platform,jbzdak/edx-platform,proversity-org/edx-platform,Endika/edx-platform,kmoocdev2/edx-platform,zofuthan/edx-platform,Livit/Livit.Learn.EdX,atsolakid/edx-platform,hkawasaki/kawasaki-aio8-2,pomegranited/edx-platform,Ayub-Khan/edx-platform,doganov/edx-platform,marcore/edx-platform,jzoldak/edx-platform,jzoldak/edx-platform,nttks/jenkins-test,openfun/edx-platform,eemirtekin/edx-platform,jamesblunt/edx-platform,Kalyzee/edx-platform,JCBarahona/edX,RPI-OPENEDX/edx-platform,JioEducation/edx-platform,Stanford-Online/edx-platform,JioEducation/edx-platform,playm2mboy/edx-platform,solashirai/edx-platform,devs1991/test_edx_docmode,wwj718/edx-platform,xuxiao19910803/edx,mjirayu/sit_academy,cognitiveclass/edx-platform,xingyepei/edx-platform,4eek/edx-platform,yokose-ks/edx-platform,Edraak/circleci-edx-platform,Edraak/edraak-platform,y12uc231/edx-platform,xingyepei/edx-platform,inares/edx-platform,miptliot/edx-platform,pabloborrego93/edx-platform,longmen21/edx-platform,proversity-org/edx-platform,chauhanhardik/populo_2,hamzehd/edx-platform,cselis86/edx-platform,appliedx/edx-platform,xuxiao19910803/edx,lduarte1991/edx-platform,nagyistoce/edx-platform,shashank971/edx-platform,polimediaupv/edx-platform,praveen-pal/edx-platform,shubhdev/openedx,edx-solutions/edx-platform,cyanna/edx-platform,hamzehd/edx-platform,EduPepperPDTesting/pepper2013-testing,romain-li/edx-platform,SivilTaram/edx-platform,EduPepperPD/pepper2013,vasyarv/edx-platform,jbassen/edx-platform,miptliot/edx-platform,nagyistoce/edx-platform,PepperPD/edx-pepper-platform,deepsrijit1105/edx-platform,zadgroup/edx-platform,4eek/edx-platform,unicri/edx-platform,kxliugang/edx-platform,R
PI-OPENEDX/edx-platform,deepsrijit1105/edx-platform,mushtaqak/edx-platform,ovnicraft/edx-platform,zerobatu/edx-platform,zofuthan/edx-platform,naresh21/synergetics-edx-platform,jruiperezv/ANALYSE,inares/edx-platform,motion2015/a3,shubhdev/edx-platform,motion2015/a3,OmarIthawi/edx-platform,longmen21/edx-platform,kamalx/edx-platform,mbareta/edx-platform-ft,stvstnfrd/edx-platform,sudheerchintala/LearnEraPlatForm,waheedahmed/edx-platform,vasyarv/edx-platform,jbassen/edx-platform,morpheby/levelup-by,kamalx/edx-platform,kmoocdev/edx-platform,doganov/edx-platform,carsongee/edx-platform,appliedx/edx-platform,MSOpenTech/edx-platform,TeachAtTUM/edx-platform,Kalyzee/edx-platform,zhenzhai/edx-platform,pku9104038/edx-platform,msegado/edx-platform,vismartltd/edx-platform,torchingloom/edx-platform,shashank971/edx-platform,EduPepperPD/pepper2013,nanolearning/edx-platform,cognitiveclass/edx-platform,chrisndodge/edx-platform,tiagochiavericosta/edx-platform,jzoldak/edx-platform,simbs/edx-platform,pabloborrego93/edx-platform,torchingloom/edx-platform,rue89-tech/edx-platform,jolyonb/edx-platform,MSOpenTech/edx-platform,auferack08/edx-platform,nikolas/edx-platform,Endika/edx-platform,EDUlib/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,CourseTalk/edx-platform,xinjiguaike/edx-platform,syjeon/new_edx,kmoocdev/edx-platform,kxliugang/edx-platform,kalebhartje/schoolboost,Edraak/edx-platform,shabab12/edx-platform,nagyistoce/edx-platform,synergeticsedx/deployment-wipro,ZLLab-Mooc/edx-platform,nttks/jenkins-test,pku9104038/edx-platform,philanthropy-u/edx-platform,teltek/edx-platform,Semi-global/edx-platform,dsajkl/123,IONISx/edx-platform,chauhanhardik/populo,JioEducation/edx-platform,lduarte1991/edx-platform,zubair-arbi/edx-platform,JioEducation/edx-platform,xinjiguaike/edx-platform,abdoosh00/edx-rtl-final,B-MOOC/edx-platform,nikolas/edx-platform,nttks/edx-platform,etzhou/edx-platform,cpennington/edx-platform,mushtaqak/edx-platform,Semi-global/edx-platform,SravanthiSinha/edx-platform,ang
elapper/edx-platform,playm2mboy/edx-platform,inares/edx-platform,jelugbo/tundex,EduPepperPDTesting/pepper2013-testing,pdehaye/theming-edx-platform,LICEF/edx-platform,xinjiguaike/edx-platform,openfun/edx-platform,unicri/edx-platform,Shrhawk/edx-platform,don-github/edx-platform,edry/edx-platform,jazztpt/edx-platform,torchingloom/edx-platform,tanmaykm/edx-platform,beacloudgenius/edx-platform,nttks/edx-platform,IITBinterns13/edx-platform-dev,kalebhartje/schoolboost,doismellburning/edx-platform,franosincic/edx-platform,vismartltd/edx-platform,arifsetiawan/edx-platform,kalebhartje/schoolboost,jruiperezv/ANALYSE,ovnicraft/edx-platform,kmoocdev2/edx-platform,jamesblunt/edx-platform,nttks/jenkins-test,alu042/edx-platform,chauhanhardik/populo_2,mtlchun/edx,syjeon/new_edx,sudheerchintala/LearnEraPlatForm,simbs/edx-platform,hamzehd/edx-platform,hastexo/edx-platform,10clouds/edx-platform,IITBinterns13/edx-platform-dev,valtech-mooc/edx-platform,B-MOOC/edx-platform,etzhou/edx-platform,mahendra-r/edx-platform,nanolearningllc/edx-platform-cypress,jazztpt/edx-platform,dkarakats/edx-platform,mjg2203/edx-platform-seas,procangroup/edx-platform,antonve/s4-project-mooc,BehavioralInsightsTeam/edx-platform,xingyepei/edx-platform,rue89-tech/edx-platform,J861449197/edx-platform,jswope00/GAI,benpatterson/edx-platform,beni55/edx-platform,pabloborrego93/edx-platform,beni55/edx-platform,pomegranited/edx-platform,jazztpt/edx-platform,prarthitm/edxplatform,unicri/edx-platform,vismartltd/edx-platform,arifsetiawan/edx-platform,MSOpenTech/edx-platform,MakeHer/edx-platform,etzhou/edx-platform,BehavioralInsightsTeam/edx-platform,zerobatu/edx-platform,shubhdev/edxOnBaadal,Shrhawk/edx-platform,edry/edx-platform,abdoosh00/edraak,jazkarta/edx-platform,pku9104038/edx-platform,nikolas/edx-platform,mahendra-r/edx-platform,stvstnfrd/edx-platform,mushtaqak/edx-platform,valtech-mooc/edx-platform,nanolearningllc/edx-platform-cypress,MakeHer/edx-platform,procangroup/edx-platform,ampax/edx-platform-backup,knehez/edx
-platform,lduarte1991/edx-platform,zhenzhai/edx-platform,ferabra/edx-platform,benpatterson/edx-platform,andyzsf/edx,abdoosh00/edraak,Edraak/edx-platform,fintech-circle/edx-platform,wwj718/edx-platform,jonathan-beard/edx-platform,mahendra-r/edx-platform,Kalyzee/edx-platform,eestay/edx-platform,doismellburning/edx-platform,chudaol/edx-platform,bitifirefly/edx-platform,devs1991/test_edx_docmode,knehez/edx-platform,antoviaque/edx-platform,cecep-edu/edx-platform,alu042/edx-platform,solashirai/edx-platform,beacloudgenius/edx-platform,Unow/edx-platform,a-parhom/edx-platform,jbassen/edx-platform,dcosentino/edx-platform,jswope00/GAI,dsajkl/reqiop,y12uc231/edx-platform,olexiim/edx-platform,playm2mboy/edx-platform,beni55/edx-platform,knehez/edx-platform,J861449197/edx-platform,kalebhartje/schoolboost,mjg2203/edx-platform-seas,defance/edx-platform,mitocw/edx-platform,praveen-pal/edx-platform,ampax/edx-platform-backup,RPI-OPENEDX/edx-platform,chand3040/cloud_that,BehavioralInsightsTeam/edx-platform,jbassen/edx-platform,alu042/edx-platform,adoosii/edx-platform,martynovp/edx-platform,IONISx/edx-platform,jolyonb/edx-platform,dsajkl/123,wwj718/edx-platform,chauhanhardik/populo,chudaol/edx-platform,cognitiveclass/edx-platform,ampax/edx-platform,utecuy/edx-platform,bitifirefly/edx-platform,eemirtekin/edx-platform,J861449197/edx-platform,UOMx/edx-platform,JCBarahona/edX,apigee/edx-platform,caesar2164/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,ZLLab-Mooc/edx-platform,philanthropy-u/edx-platform,tiagochiavericosta/edx-platform,vasyarv/edx-platform,Stanford-Online/edx-platform,morenopc/edx-platform,hastexo/edx-platform,jjmiranda/edx-platform,shashank971/edx-platform,fly19890211/edx-platform,jzoldak/edx-platform,polimediaupv/edx-platform,shurihell/testasia,jbassen/edx-platform,chrisndodge/edx-platform,xingyepei/edx-platform,kamalx/edx-platform,edx-solutions/edx-platform,ahmedaljazzar/edx-platform,vismartltd/edx-platform,benpatterson/edx-platform,CredoReference/edx-platform,dsajk
l/123,rationalAgent/edx-platform-custom,OmarIthawi/edx-platform,Edraak/circleci-edx-platform,EDUlib/edx-platform,leansoft/edx-platform,iivic/BoiseStateX,antonve/s4-project-mooc,carsongee/edx-platform,louyihua/edx-platform,jbzdak/edx-platform,kursitet/edx-platform,philanthropy-u/edx-platform,PepperPD/edx-pepper-platform,zubair-arbi/edx-platform,praveen-pal/edx-platform,defance/edx-platform,rismalrv/edx-platform,ubc/edx-platform,mbareta/edx-platform-ft,WatanabeYasumasa/edx-platform,peterm-itr/edx-platform,arbrandes/edx-platform,ampax/edx-platform-backup,appliedx/edx-platform,morenopc/edx-platform,CredoReference/edx-platform,antonve/s4-project-mooc,fly19890211/edx-platform,rationalAgent/edx-platform-custom,jazkarta/edx-platform,eemirtekin/edx-platform,mjirayu/sit_academy,LICEF/edx-platform,analyseuc3m/ANALYSE-v1,cpennington/edx-platform,pdehaye/theming-edx-platform,analyseuc3m/ANALYSE-v1,ahmadiga/min_edx,chauhanhardik/populo_2,syjeon/new_edx,halvertoluke/edx-platform,Semi-global/edx-platform,bigdatauniversity/edx-platform,shurihell/testasia,fintech-circle/edx-platform,hkawasaki/kawasaki-aio8-0,kmoocdev/edx-platform,sudheerchintala/LearnEraPlatForm,msegado/edx-platform,jazztpt/edx-platform,EduPepperPD/pepper2013,4eek/edx-platform,dsajkl/reqiop,Endika/edx-platform,mcgachey/edx-platform,pku9104038/edx-platform,motion2015/a3,leansoft/edx-platform,shubhdev/openedx,SivilTaram/edx-platform,raccoongang/edx-platform,chrisndodge/edx-platform,mjg2203/edx-platform-seas,valtech-mooc/edx-platform,pdehaye/theming-edx-platform,jswope00/griffinx,ahmedaljazzar/edx-platform,atsolakid/edx-platform,edx/edx-platform,nikolas/edx-platform,ovnicraft/edx-platform,ZLLab-Mooc/edx-platform,peterm-itr/edx-platform,unicri/edx-platform,dkarakats/edx-platform,mitocw/edx-platform,10clouds/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,mtlchun/edx,LearnEra/LearnEraPlaftform,analyseuc3m/ANALYSE-v1,jazkarta/edx-platform-for-isc,waheedahmed/edx-platform,edx/edx-platform,nanolearningllc/edx
-platform-cypress-2,defance/edx-platform,Edraak/circleci-edx-platform,mushtaqak/edx-platform,nanolearningllc/edx-platform-cypress-2,tiagochiavericosta/edx-platform,polimediaupv/edx-platform,Shrhawk/edx-platform,dcosentino/edx-platform,IITBinterns13/edx-platform-dev,EduPepperPD/pepper2013,jbzdak/edx-platform,don-github/edx-platform,torchingloom/edx-platform,gsehub/edx-platform,vasyarv/edx-platform,angelapper/edx-platform,ESOedX/edx-platform,kmoocdev/edx-platform,simbs/edx-platform,LearnEra/LearnEraPlaftform,antonve/s4-project-mooc,itsjeyd/edx-platform,Lektorium-LLC/edx-platform,marcore/edx-platform,appsembler/edx-platform,AkA84/edx-platform,fintech-circle/edx-platform,tiagochiavericosta/edx-platform,a-parhom/edx-platform,appsembler/edx-platform,cselis86/edx-platform,iivic/BoiseStateX,ahmedaljazzar/edx-platform,chauhanhardik/populo,hmcmooc/muddx-platform,halvertoluke/edx-platform,sudheerchintala/LearnEraPlatForm,pelikanchik/edx-platform,zofuthan/edx-platform,TeachAtTUM/edx-platform,tanmaykm/edx-platform,bdero/edx-platform,procangroup/edx-platform,hamzehd/edx-platform,appliedx/edx-platform,mitocw/edx-platform,etzhou/edx-platform,shurihell/testasia,vikas1885/test1,chauhanhardik/populo,IONISx/edx-platform,Edraak/circleci-edx-platform,rismalrv/edx-platform,shubhdev/edx-platform,dsajkl/reqiop,alexthered/kienhoc-platform,DNFcode/edx-platform,hastexo/edx-platform,doganov/edx-platform,cselis86/edx-platform,don-github/edx-platform,jbzdak/edx-platform,EduPepperPDTesting/pepper2013-testing,jonathan-beard/edx-platform,openfun/edx-platform,analyseuc3m/ANALYSE-v1,pomegranited/edx-platform,arifsetiawan/edx-platform,AkA84/edx-platform,miptliot/edx-platform,jswope00/griffinx,OmarIthawi/edx-platform,edry/edx-platform,auferack08/edx-platform,jamiefolsom/edx-platform,nttks/edx-platform,jamesblunt/edx-platform,xingyepei/edx-platform,bigdatauniversity/edx-platform,bitifirefly/edx-platform,Softmotions/edx-platform,bigdatauniversity/edx-platform,jbzdak/edx-platform,playm2mboy/edx-platform,ra
tionalAgent/edx-platform-custom,mahendra-r/edx-platform,rationalAgent/edx-platform-custom,romain-li/edx-platform,bitifirefly/edx-platform,devs1991/test_edx_docmode,xuxiao19910803/edx-platform,shubhdev/openedx,zhenzhai/edx-platform,atsolakid/edx-platform,chauhanhardik/populo,sameetb-cuelogic/edx-platform-test,ahmadio/edx-platform,jazkarta/edx-platform-for-isc,naresh21/synergetics-edx-platform,solashirai/edx-platform,alexthered/kienhoc-platform,kamalx/edx-platform,apigee/edx-platform,DNFcode/edx-platform,nagyistoce/edx-platform,mcgachey/edx-platform,Lektorium-LLC/edx-platform,MakeHer/edx-platform,shubhdev/edxOnBaadal,nikolas/edx-platform,edry/edx-platform,zubair-arbi/edx-platform,morpheby/levelup-by,martynovp/edx-platform,motion2015/a3,devs1991/test_edx_docmode,hkawasaki/kawasaki-aio8-0,appliedx/edx-platform,wwj718/ANALYSE,nttks/edx-platform,zubair-arbi/edx-platform,ESOedX/edx-platform,bigdatauniversity/edx-platform,devs1991/test_edx_docmode,MSOpenTech/edx-platform,alexthered/kienhoc-platform,deepsrijit1105/edx-platform,morenopc/edx-platform,pepeportela/edx-platform,leansoft/edx-platform,nanolearningllc/edx-platform-cypress,cecep-edu/edx-platform,raccoongang/edx-platform,jamiefolsom/edx-platform,IITBinterns13/edx-platform-dev,openfun/edx-platform,Edraak/edx-platform,shurihell/testasia,shabab12/edx-platform,jonathan-beard/edx-platform,UXE/local-edx,andyzsf/edx,morenopc/edx-platform,doismellburning/edx-platform,jazkarta/edx-platform,ak2703/edx-platform,y12uc231/edx-platform,jelugbo/tundex,edx/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,ESOedX/edx-platform,TsinghuaX/edx-platform,WatanabeYasumasa/edx-platform,MakeHer/edx-platform,CourseTalk/edx-platform,eduNEXT/edunext-platform,Softmotions/edx-platform,10clouds/edx-platform,ampax/edx-platform-backup,Stanford-Online/edx-platform,devs1991/test_edx_docmode,wwj718/ANALYSE,nttks/jenkins-test,atsolakid/edx-platform,edx-solutions/edx-platform,dsajkl/123,arifsetiawan/edx-platform,hkawasaki/kawasaki-aio8-1,bd
ero/edx-platform,jamesblunt/edx-platform,SravanthiSinha/edx-platform,eemirtekin/edx-platform,amir-qayyum-khan/edx-platform,Shrhawk/edx-platform,alu042/edx-platform,chudaol/edx-platform,jjmiranda/edx-platform,Edraak/edraak-platform,pelikanchik/edx-platform,dcosentino/edx-platform,bitifirefly/edx-platform,xinjiguaike/edx-platform,Softmotions/edx-platform,adoosii/edx-platform,xuxiao19910803/edx,prarthitm/edxplatform,chand3040/cloud_that,DNFcode/edx-platform,cecep-edu/edx-platform,auferack08/edx-platform,edx-solutions/edx-platform,rhndg/openedx,benpatterson/edx-platform,IndonesiaX/edx-platform,wwj718/ANALYSE,jjmiranda/edx-platform,Softmotions/edx-platform,4eek/edx-platform,marcore/edx-platform,pdehaye/theming-edx-platform,ak2703/edx-platform,kalebhartje/schoolboost,eduNEXT/edunext-platform,gymnasium/edx-platform,ZLLab-Mooc/edx-platform,mcgachey/edx-platform,cpennington/edx-platform,fly19890211/edx-platform,xuxiao19910803/edx,angelapper/edx-platform,longmen21/edx-platform,inares/edx-platform,SivilTaram/edx-platform,antonve/s4-project-mooc,prarthitm/edxplatform,gymnasium/edx-platform,mtlchun/edx,nanolearning/edx-platform,doismellburning/edx-platform,ubc/edx-platform,louyihua/edx-platform,halvertoluke/edx-platform,AkA84/edx-platform,knehez/edx-platform,arbrandes/edx-platform,cyanna/edx-platform,apigee/edx-platform,ahmedaljazzar/edx-platform,UXE/local-edx,andyzsf/edx,kursitet/edx-platform,nagyistoce/edx-platform,martynovp/edx-platform,ak2703/edx-platform,abdoosh00/edraak,SravanthiSinha/edx-platform,raccoongang/edx-platform,simbs/edx-platform,hmcmooc/muddx-platform,proversity-org/edx-platform,mjg2203/edx-platform-seas,antoviaque/edx-platform,WatanabeYasumasa/edx-platform,polimediaupv/edx-platform,xinjiguaike/edx-platform,vikas1885/test1,Edraak/edx-platform,a-parhom/edx-platform,mjirayu/sit_academy,doganov/edx-platform,EDUlib/edx-platform,UXE/local-edx,pepeportela/edx-platform,tanmaykm/edx-platform,yokose-ks/edx-platform,waheedahmed/edx-platform,wwj718/ANALYSE,EduPepperPDTest
ing/pepper2013-testing,halvertoluke/edx-platform,cyanna/edx-platform,kxliugang/edx-platform,shubhdev/openedx,procangroup/edx-platform,zerobatu/edx-platform,jelugbo/tundex,auferack08/edx-platform,proversity-org/edx-platform,abdoosh00/edx-rtl-final,teltek/edx-platform,nanolearningllc/edx-platform-cypress,msegado/edx-platform,chand3040/cloud_that,jamiefolsom/edx-platform,vikas1885/test1,nttks/edx-platform,carsongee/edx-platform,kmoocdev2/edx-platform,edry/edx-platform,EduPepperPDTesting/pepper2013-testing,franosincic/edx-platform,kmoocdev2/edx-platform,ak2703/edx-platform,marcore/edx-platform,chauhanhardik/populo_2,Edraak/edraak-platform,peterm-itr/edx-platform,dcosentino/edx-platform,adoosii/edx-platform,mcgachey/edx-platform,y12uc231/edx-platform,nanolearning/edx-platform,shubhdev/edx-platform,olexiim/edx-platform,SravanthiSinha/edx-platform,beni55/edx-platform,beacloudgenius/edx-platform,10clouds/edx-platform,IndonesiaX/edx-platform,ferabra/edx-platform,knehez/edx-platform,shabab12/edx-platform,caesar2164/edx-platform,jswope00/griffinx,shubhdev/edx-platform,mtlchun/edx,amir-qayyum-khan/edx-platform,CourseTalk/edx-platform,zadgroup/edx-platform,IndonesiaX/edx-platform,ampax/edx-platform,DefyVentures/edx-platform,pepeportela/edx-platform,jswope00/griffinx,jazkarta/edx-platform,rue89-tech/edx-platform,B-MOOC/edx-platform,iivic/BoiseStateX,kursitet/edx-platform,pepeportela/edx-platform,pabloborrego93/edx-platform,zerobatu/edx-platform,dkarakats/edx-platform,LearnEra/LearnEraPlaftform,longmen21/edx-platform,motion2015/edx-platform,iivic/BoiseStateX,iivic/BoiseStateX,louyihua/edx-platform,JCBarahona/edX,MakeHer/edx-platform,cognitiveclass/edx-platform,hkawasaki/kawasaki-aio8-2,jazztpt/edx-platform,synergeticsedx/deployment-wipro,rhndg/openedx,Livit/Livit.Learn.EdX,shubhdev/edx-platform,sameetb-cuelogic/edx-platform-test,angelapper/edx-platform,Ayub-Khan/edx-platform,caesar2164/edx-platform,motion2015/edx-platform,Ayub-Khan/edx-platform,motion2015/edx-platform,prarthitm/ed
xplatform,mtlchun/edx,shubhdev/edxOnBaadal,naresh21/synergetics-edx-platform,chand3040/cloud_that,naresh21/synergetics-edx-platform,adoosii/edx-platform,ahmadiga/min_edx,motion2015/edx-platform,amir-qayyum-khan/edx-platform,chrisndodge/edx-platform,appsembler/edx-platform,mcgachey/edx-platform,vikas1885/test1,kxliugang/edx-platform,y12uc231/edx-platform,morpheby/levelup-by,IndonesiaX/edx-platform,beni55/edx-platform,gsehub/edx-platform,morenopc/edx-platform,jruiperezv/ANALYSE,ahmadio/edx-platform,chudaol/edx-platform,edx/edx-platform,DefyVentures/edx-platform,louyihua/edx-platform,ahmadio/edx-platform,ferabra/edx-platform,Shrhawk/edx-platform,ubc/edx-platform,Ayub-Khan/edx-platform,jonathan-beard/edx-platform,jswope00/griffinx,rue89-tech/edx-platform,UOMx/edx-platform,Edraak/edx-platform,shubhdev/edxOnBaadal,Stanford-Online/edx-platform,eduNEXT/edx-platform,lduarte1991/edx-platform,Semi-global/edx-platform,JCBarahona/edX,syjeon/new_edx,openfun/edx-platform,ferabra/edx-platform,pelikanchik/edx-platform,xuxiao19910803/edx,JCBarahona/edX,tanmaykm/edx-platform,rhndg/openedx,kmoocdev2/edx-platform,dsajkl/reqiop | from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
Fix test so that it works with both CMS and LMS settings | from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
| <commit_before>from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
<commit_msg>Fix test so that it works with both CMS and LMS settings<commit_after> | from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
| from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
Fix test so that it works with both CMS and LMS settingsfrom django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
| <commit_before>from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'about_edx'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
expected_link = reverse('about_edx')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
<commit_msg>Fix test so that it works with both CMS and LMS settings<commit_after>from django.test import TestCase
from django.test.utils import override_settings
from django.core.urlresolvers import reverse
from django.conf import settings
from mitxmako.shortcuts import marketing_link
from mock import patch
class ShortcutsTests(TestCase):
"""
Test the mitxmako shortcuts file
"""
@override_settings(MKTG_URLS={'ROOT': 'dummy-root', 'ABOUT': '/about-us'})
@override_settings(MKTG_URL_LINK_MAP={'ABOUT': 'login'})
def test_marketing_link(self):
# test marketing site on
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': True}):
expected_link = 'dummy-root/about-us'
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
# test marketing site off
with patch.dict('django.conf.settings.MITX_FEATURES', {'ENABLE_MKTG_SITE': False}):
# we are using login because it is common across both cms and lms
expected_link = reverse('login')
link = marketing_link('ABOUT')
self.assertEquals(link, expected_link)
|
0c24bb0a422a816b4e6909e458bb1bbbfed61720 | fluent_contents/plugins/oembeditem/__init__.py | fluent_contents/plugins/oembeditem/__init__.py | VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError("The 'micawber' package is required to use the 'oembeditem' plugin.")
| Add check for `micawber` existance in `oembeditem` plugin. | Add check for `micawber` existance in `oembeditem` plugin.
| Python | apache-2.0 | edoburu/django-fluent-contents,ixc/django-fluent-contents,django-fluent/django-fluent-contents,pombredanne/django-fluent-contents,django-fluent/django-fluent-contents,jpotterm/django-fluent-contents,edoburu/django-fluent-contents,pombredanne/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,ixc/django-fluent-contents,jpotterm/django-fluent-contents,pombredanne/django-fluent-contents,jpotterm/django-fluent-contents,ixc/django-fluent-contents | Add check for `micawber` existance in `oembeditem` plugin. | VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError("The 'micawber' package is required to use the 'oembeditem' plugin.")
| <commit_before><commit_msg>Add check for `micawber` existance in `oembeditem` plugin.<commit_after> | VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError("The 'micawber' package is required to use the 'oembeditem' plugin.")
| Add check for `micawber` existance in `oembeditem` plugin.VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError("The 'micawber' package is required to use the 'oembeditem' plugin.")
| <commit_before><commit_msg>Add check for `micawber` existance in `oembeditem` plugin.<commit_after>VERSION = (0, 1, 0)
# Do some version checking
try:
import micawber
except ImportError:
raise ImportError("The 'micawber' package is required to use the 'oembeditem' plugin.")
| |
6690479e46c9138c6f57ce9415b0429175545e96 | stock_transfer_restrict_lot/models/stock_production_lot.py | stock_transfer_restrict_lot/models/stock_production_lot.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
| FIX in lot name_get to show location with the stock | FIX in lot name_get to show location with the stock
| Python | agpl-3.0 | ingadhoc/stock | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
FIX in lot name_get to show location with the stock | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
<commit_msg>FIX in lot name_get to show location with the stock<commit_after> | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
FIX in lot name_get to show location with the stock# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
| <commit_before># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
name = '%s (%s u)' % (rec.name, rec.qty_available_not_res or 0.0)
result.append((rec.id, name))
return result
<commit_msg>FIX in lot name_get to show location with the stock<commit_after># -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import models, fields, api
class StockProductionlot(models.Model):
_inherit = 'stock.production.lot'
qty_available_not_res = fields.Float(
compute='_compute_qty_available_not_res',
string='Qty Available Not Reserved',
store=True
)
@api.multi
@api.depends('quant_ids.reservation_id', 'quant_ids.qty')
def _compute_qty_available_not_res(self):
for rec in self:
rec.qty_available_not_res = sum(rec.quant_ids.filtered(
lambda x: not x.reservation_id).mapped('qty'))
@api.multi
def name_get(self):
result = []
for rec in self:
read = rec.quant_ids.read_group(
[('lot_id', '=', rec.id)],
['location_id', 'qty'], 'location_id')
locations_qty = ', '.join(
['%s: %s' % (x['location_id'][1], x['qty']) for x in read])
name = '%s (%s)' % (rec.name, locations_qty)
result.append((rec.id, name))
return result
|
a02791231dcc5ecd5bebbb698719e47bd01c68ed | src/__init__.py | src/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cgpm.version import __version__
| # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| Revert "Expose __version__ from the top level cgpm module." | Revert "Expose __version__ from the top level cgpm module."
This reverts commit 3b94d4111ea00ff15cf0566392861124fc31b430.
Shouldn't've jumped the gun like that, I guess. Apparently this
doesn't work and it's not clear to me why.
| Python | apache-2.0 | probcomp/cgpm,probcomp/cgpm | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cgpm.version import __version__
Revert "Expose __version__ from the top level cgpm module."
This reverts commit 3b94d4111ea00ff15cf0566392861124fc31b430.
Shouldn't've jumped the gun like that, I guess. Apparently this
doesn't work and it's not clear to me why. | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cgpm.version import __version__
<commit_msg>Revert "Expose __version__ from the top level cgpm module."
This reverts commit 3b94d4111ea00ff15cf0566392861124fc31b430.
Shouldn't've jumped the gun like that, I guess. Apparently this
doesn't work and it's not clear to me why.<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cgpm.version import __version__
Revert "Expose __version__ from the top level cgpm module."
This reverts commit 3b94d4111ea00ff15cf0566392861124fc31b430.
Shouldn't've jumped the gun like that, I guess. Apparently this
doesn't work and it's not clear to me why.# -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cgpm.version import __version__
<commit_msg>Revert "Expose __version__ from the top level cgpm module."
This reverts commit 3b94d4111ea00ff15cf0566392861124fc31b430.
Shouldn't've jumped the gun like that, I guess. Apparently this
doesn't work and it's not clear to me why.<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015-2016 MIT Probabilistic Computing Project
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
|
8207d86b7b2a6e1f81454eefea4784d89c8674a8 | resolver_test/django_test.py | resolver_test/django_test.py | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
| # Copyright (c) 2017 PythonAnywhere LLP.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
| Use different usernames for each test. by: Glenn, Giles | Use different usernames for each test. by: Glenn, Giles
| Python | mit | pythonanywhere/resolver_test | # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
Use different usernames for each test. by: Glenn, Giles | # Copyright (c) 2017 PythonAnywhere LLP.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
| <commit_before># Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
<commit_msg>Use different usernames for each test. by: Glenn, Giles<commit_after> | # Copyright (c) 2017 PythonAnywhere LLP.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
| # Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
Use different usernames for each test. by: Glenn, Giles# Copyright (c) 2017 PythonAnywhere LLP.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
| <commit_before># Copyright (c) 2011 Resolver Systems Ltd.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
self.user = User(username='cherie')
self.user.save()
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
<commit_msg>Use different usernames for each test. by: Glenn, Giles<commit_after># Copyright (c) 2017 PythonAnywhere LLP.
# All Rights Reserved
#
from urlparse import urljoin
from mock import Mock
from resolver_test import ResolverTestMixins
import django
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins):
maxDiff = None
usernumber = 0
class ResolverViewTestCase(ResolverDjangoTestCase):
def setUp(self):
global usernumber
self.user = User.objects.create(username='cherie{}'.format(usernumber))
usernumber += 1
self.request = HttpRequest()
self.request.session = Mock()
self.request.user = self.user
self.client.force_login(self.user)
def assert_login_required(self, view_to_call):
self.owner = self.request.user = AnonymousUser()
self.request.get_full_path = lambda: "my_path"
self.request.build_absolute_uri = lambda: "my_path"
response = view_to_call()
self.assertEquals(response.status_code, 302)
self.assertEquals(
response['Location'],
urljoin(settings.LOGIN_URL, '?next=my_path')
)
|
50cc2ba3353cdd27513999465e854d01823605a4 | angr/knowledge_base.py | angr/knowledge_base.py | """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
| """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
def __setstate__(self, state):
self._project = state['project']
self.obj = state['obj']
self._plugins = state['plugins']
def __getstate__(self):
s = {
'project': self._project,
'obj': self.obj,
'plugins': self._plugins,
}
return s
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
| Fix the recursion bug in KnowledgeBase after the previous refactor. | Fix the recursion bug in KnowledgeBase after the previous refactor.
| Python | bsd-2-clause | f-prettyland/angr,axt/angr,tyb0807/angr,iamahuman/angr,angr/angr,angr/angr,axt/angr,angr/angr,chubbymaggie/angr,schieb/angr,chubbymaggie/angr,f-prettyland/angr,iamahuman/angr,f-prettyland/angr,tyb0807/angr,iamahuman/angr,axt/angr,schieb/angr,chubbymaggie/angr,schieb/angr,tyb0807/angr | """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
Fix the recursion bug in KnowledgeBase after the previous refactor. | """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
def __setstate__(self, state):
self._project = state['project']
self.obj = state['obj']
self._plugins = state['plugins']
def __getstate__(self):
s = {
'project': self._project,
'obj': self.obj,
'plugins': self._plugins,
}
return s
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
| <commit_before>"""Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
<commit_msg>Fix the recursion bug in KnowledgeBase after the previous refactor.<commit_after> | """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
def __setstate__(self, state):
self._project = state['project']
self.obj = state['obj']
self._plugins = state['plugins']
def __getstate__(self):
s = {
'project': self._project,
'obj': self.obj,
'plugins': self._plugins,
}
return s
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
| """Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
Fix the recursion bug in KnowledgeBase after the previous refactor."""Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
def __setstate__(self, state):
self._project = state['project']
self.obj = state['obj']
self._plugins = state['plugins']
def __getstate__(self):
s = {
'project': self._project,
'obj': self.obj,
'plugins': self._plugins,
}
return s
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
| <commit_before>"""Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
<commit_msg>Fix the recursion bug in KnowledgeBase after the previous refactor.<commit_after>"""Representing the artifacts of a project."""
from .knowledge_plugins.plugin import default_plugins
class KnowledgeBase(object):
"""Represents a "model" of knowledge about an artifact.
Contains things like a CFG, data references, etc.
"""
def __init__(self, project, obj):
self._project = project
self.obj = obj
self._plugins = {}
@property
def callgraph(self):
return self.functions.callgraph
@property
def unresolved_indirect_jumps(self):
return self.indirect_jumps.unresolved
@property
def resolved_indirect_jumps(self):
return self.indirect_jumps.resolved
def __setstate__(self, state):
self._project = state['project']
self.obj = state['obj']
self._plugins = state['plugins']
def __getstate__(self):
s = {
'project': self._project,
'obj': self.obj,
'plugins': self._plugins,
}
return s
#
# Plugin accessor
#
def __contains__(self, plugin_name):
return plugin_name in self._plugins
def __getattr__(self, v):
try:
return self.get_plugin(v)
except KeyError:
raise AttributeError(v)
#
# Plugins
#
def has_plugin(self, name):
return name in self._plugins
def get_plugin(self, name):
if name not in self._plugins:
p = default_plugins[name](self)
self.register_plugin(name, p)
return p
return self._plugins[name]
def register_plugin(self, name, plugin):
self._plugins[name] = plugin
return plugin
def release_plugin(self, name):
if name in self._plugins:
del self._plugins[name]
|
22eda7c2b844c9dccb31ad9cce882cc13d1adf75 | apel_rest/urls.py | apel_rest/urls.py | """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view()),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view()))
| """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls,)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view(),
name='CloudRecordView'),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view(),
name='CloudRecordSummaryView'))
| Add name to patterns in urlpatterns | Add name to patterns in urlpatterns
- so tests can use reverse()
| Python | apache-2.0 | apel/rest,apel/rest | """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view()),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view()))
Add name to patterns in urlpatterns
- so tests can use reverse() | """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls,)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view(),
name='CloudRecordView'),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view(),
name='CloudRecordSummaryView'))
| <commit_before>"""This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view()),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view()))
<commit_msg>Add name to patterns in urlpatterns
- so tests can use reverse()<commit_after> | """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls,)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view(),
name='CloudRecordView'),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view(),
name='CloudRecordSummaryView'))
| """This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view()),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view()))
Add name to patterns in urlpatterns
- so tests can use reverse()"""This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls,)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view(),
name='CloudRecordView'),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view(),
name='CloudRecordSummaryView'))
| <commit_before>"""This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view()),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view()))
<commit_msg>Add name to patterns in urlpatterns
- so tests can use reverse()<commit_after>"""This file maps url patterns to classes."""
from django.conf.urls import patterns, include, url
from django.contrib import admin
from api.views.CloudRecordSummaryView import CloudRecordSummaryView
from api.views.CloudRecordView import CloudRecordView
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'apel_rest.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/',
include(admin.site.urls,)),
url(r'^api/v1/cloud/record$',
CloudRecordView.as_view(),
name='CloudRecordView'),
url(r'^api/v1/cloud/record/summary$',
CloudRecordSummaryView.as_view(),
name='CloudRecordSummaryView'))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.